Update app.py
app.py
CHANGED
@@ -0,0 +1,45 @@
+# voice_to_app.py - Core Creator Logic
+
+from openai import OpenAI
+from core_creator.intent_parser import classify_robot_idea
+from core_creator.app_blueprint import generate_app_blueprint
+from core_creator.code_generator import generate_app_code
+from core_creator.assets_manager import fetch_visual_assets
+
+class VoiceToAppCreator:
+    def __init__(self, voice_transcript: str):
+        self.voice_input = voice_transcript
+        self.intent = None
+        self.blueprint = None
+        self.generated_code = None
+        self.assets = None
+
+    def run_pipeline(self):
+        print("\n[🔍] Classifying robot intent...")
+        self.intent = classify_robot_idea(self.voice_input)
+
+        print(f"[🧠] Detected intent: {self.intent}")
+        self.blueprint = generate_app_blueprint(self.voice_input, self.intent)
+
+        print("[⚙️] Generating code from blueprint...")
+        self.generated_code = generate_app_code(self.blueprint)
+
+        print("[🎨] Fetching visual/audio assets...")
+        self.assets = fetch_visual_assets(self.intent)
+
+        print("[✅] Robot App creation complete.")
+        return {
+            "intent": self.intent,
+            "blueprint": self.blueprint,
+            "code": self.generated_code,
+            "assets": self.assets
+        }
+
+# Example usage
+if __name__ == "__main__":
+    user_idea = "Build a robot that teaches kids to brush their teeth with fun animations."
+    creator = VoiceToAppCreator(user_idea)
+    app_package = creator.run_pipeline()
+
+    print("\n--- Final App Package ---")
+    print(app_package["code"][:500])  # preview generated code