seawolf2357 committed on
Commit
89d1c84
·
verified ·
1 Parent(s): c5066e6

Upload app.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. app.py +275 -0
app.py ADDED
@@ -0,0 +1,275 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ My Workflow App
3
+ A workflow application created with MOUSE Workflow builder.
4
+ Generated by MOUSE Workflow
5
+ """
6
+
7
+ import os
8
+ import json
9
+ import gradio as gr
10
+ import requests
11
+
12
+ # Workflow configuration
13
+ WORKFLOW_DATA = {
14
+ "nodes": [
15
+ {
16
+ "id": "input_1",
17
+ "type": "ChatInput",
18
+ "position": {
19
+ "x": 100,
20
+ "y": 200
21
+ },
22
+ "data": {
23
+ "label": "User Question",
24
+ "template": {
25
+ "input_value": {
26
+ "value": "What is the capital of Korea?"
27
+ }
28
+ }
29
+ }
30
+ },
31
+ {
32
+ "id": "llm_1",
33
+ "type": "llmNode",
34
+ "position": {
35
+ "x": 400,
36
+ "y": 200
37
+ },
38
+ "data": {
39
+ "label": "AI Processing",
40
+ "template": {
41
+ "provider": {
42
+ "value": "OpenAI"
43
+ },
44
+ "model": {
45
+ "value": "gpt-4.1-mini"
46
+ },
47
+ "temperature": {
48
+ "value": 0.7
49
+ },
50
+ "system_prompt": {
51
+ "value": "You are a helpful assistant."
52
+ }
53
+ }
54
+ }
55
+ },
56
+ {
57
+ "id": "output_1",
58
+ "type": "ChatOutput",
59
+ "position": {
60
+ "x": 700,
61
+ "y": 200
62
+ },
63
+ "data": {
64
+ "label": "Answer"
65
+ }
66
+ }
67
+ ],
68
+ "edges": [
69
+ {
70
+ "id": "e1",
71
+ "source": "input_1",
72
+ "target": "llm_1"
73
+ },
74
+ {
75
+ "id": "e2",
76
+ "source": "llm_1",
77
+ "target": "output_1"
78
+ }
79
+ ]
80
+ }
81
+
82
+ def execute_workflow(*input_values):
83
+ """Execute the workflow with given inputs"""
84
+
85
+ # API keys from environment
86
+ vidraft_token = os.getenv("FRIENDLI_TOKEN")
87
+ openai_key = os.getenv("OPENAI_API_KEY")
88
+
89
+ nodes = WORKFLOW_DATA.get("nodes", [])
90
+ edges = WORKFLOW_DATA.get("edges", [])
91
+
92
+ results = {}
93
+
94
+ # Get input nodes
95
+ input_nodes = [n for n in nodes if n.get("type") in ["ChatInput", "textInput", "Input", "numberInput"]]
96
+
97
+ # Map inputs to node IDs
98
+ for i, node in enumerate(input_nodes):
99
+ if i < len(input_values):
100
+ results[node["id"]] = input_values[i]
101
+
102
+ # Process nodes
103
+ for node in nodes:
104
+ node_id = node.get("id")
105
+ node_type = node.get("type", "")
106
+ node_data = node.get("data", {})
107
+ template = node_data.get("template", {})
108
+
109
+ if node_type == "textNode":
110
+ # Combine connected inputs
111
+ base_text = template.get("text", {}).get("value", "")
112
+ connected_inputs = []
113
+
114
+ for edge in edges:
115
+ if edge.get("target") == node_id:
116
+ source_id = edge.get("source")
117
+ if source_id in results:
118
+ connected_inputs.append(f"{source_id}: {results[source_id]}")
119
+
120
+ if connected_inputs:
121
+ results[node_id] = f"{base_text}\n\nInputs:\n" + "\n".join(connected_inputs)
122
+ else:
123
+ results[node_id] = base_text
124
+
125
+ elif node_type in ["llmNode", "OpenAIModel", "ChatModel"]:
126
+ # Get provider and model
127
+ provider = template.get("provider", {}).get("value", "OpenAI")
128
+ temperature = template.get("temperature", {}).get("value", 0.7)
129
+ system_prompt = template.get("system_prompt", {}).get("value", "")
130
+
131
+ # Get input text
132
+ input_text = ""
133
+ for edge in edges:
134
+ if edge.get("target") == node_id:
135
+ source_id = edge.get("source")
136
+ if source_id in results:
137
+ input_text = results[source_id]
138
+ break
139
+
140
+ # Call API
141
+ if provider == "OpenAI" and openai_key:
142
+ try:
143
+ from openai import OpenAI
144
+ client = OpenAI(api_key=openai_key)
145
+
146
+ messages = []
147
+ if system_prompt:
148
+ messages.append({"role": "system", "content": system_prompt})
149
+ messages.append({"role": "user", "content": input_text})
150
+
151
+ response = client.chat.completions.create(
152
+ model="gpt-4.1-mini",
153
+ messages=messages,
154
+ temperature=temperature,
155
+ max_tokens=1000
156
+ )
157
+
158
+ results[node_id] = response.choices[0].message.content
159
+ except Exception as e:
160
+ results[node_id] = f"[OpenAI Error: {str(e)}]"
161
+
162
+ elif provider == "VIDraft" and vidraft_token:
163
+ try:
164
+ headers = {
165
+ "Authorization": f"Bearer {vidraft_token}",
166
+ "Content-Type": "application/json"
167
+ }
168
+
169
+ messages = []
170
+ if system_prompt:
171
+ messages.append({"role": "system", "content": system_prompt})
172
+ messages.append({"role": "user", "content": input_text})
173
+
174
+ payload = {
175
+ "model": "dep89a2fld32mcm",
176
+ "messages": messages,
177
+ "max_tokens": 16384,
178
+ "temperature": temperature,
179
+ "top_p": 0.8,
180
+ "stream": False
181
+ }
182
+
183
+ response = requests.post(
184
+ "https://api.friendli.ai/dedicated/v1/chat/completions",
185
+ headers=headers,
186
+ json=payload,
187
+ timeout=30
188
+ )
189
+
190
+ if response.status_code == 200:
191
+ results[node_id] = response.json()["choices"][0]["message"]["content"]
192
+ else:
193
+ results[node_id] = f"[VIDraft Error: {response.status_code}]"
194
+ except Exception as e:
195
+ results[node_id] = f"[VIDraft Error: {str(e)}]"
196
+ else:
197
+ # Show which API key is missing
198
+ if provider == "OpenAI":
199
+ results[node_id] = "[OpenAI API key not found. Please set OPENAI_API_KEY in Space secrets]"
200
+ elif provider == "VIDraft":
201
+ results[node_id] = "[VIDraft API key not found. Please set FRIENDLI_TOKEN in Space secrets]"
202
+ else:
203
+ results[node_id] = f"[Simulated Response: {input_text[:50]}...]"
204
+
205
+ elif node_type in ["ChatOutput", "textOutput", "Output"]:
206
+ # Get connected result
207
+ for edge in edges:
208
+ if edge.get("target") == node_id:
209
+ source_id = edge.get("source")
210
+ if source_id in results:
211
+ results[node_id] = results[source_id]
212
+ break
213
+
214
+ # Return outputs
215
+ output_nodes = [n for n in nodes if n.get("type") in ["ChatOutput", "textOutput", "Output"]]
216
+ return [results.get(n["id"], "") for n in output_nodes]
217
+
218
+ # Build UI
219
+ with gr.Blocks(title="My Workflow App", theme=gr.themes.Soft()) as demo:
220
+ gr.Markdown("# My Workflow App")
221
+ gr.Markdown("A workflow application created with MOUSE Workflow builder.")
222
+
223
+ # API Status Check
224
+ vidraft_token = os.getenv("FRIENDLI_TOKEN")
225
+ openai_key = os.getenv("OPENAI_API_KEY")
226
+
227
+ if not vidraft_token and not openai_key:
228
+ gr.Markdown("""
229
+ ⚠️ **API Keys Required**
230
+
231
+ Please set the following environment variables in Space settings → Secrets:
232
+ - `FRIENDLI_TOKEN` for VIDraft (Gemma-3-r1984-27B)
233
+ - `OPENAI_API_KEY` for OpenAI (gpt-4.1-mini)
234
+ """)
235
+
236
+ # Extract nodes
237
+ nodes = WORKFLOW_DATA.get("nodes", [])
238
+ input_nodes = [n for n in nodes if n.get("type") in ["ChatInput", "textInput", "Input", "numberInput"]]
239
+ output_nodes = [n for n in nodes if n.get("type") in ["ChatOutput", "textOutput", "Output"]]
240
+
241
+ # Create inputs
242
+ inputs = []
243
+ if input_nodes:
244
+ gr.Markdown("### 📥 Inputs")
245
+ for node in input_nodes:
246
+ label = node.get("data", {}).get("label", node.get("id"))
247
+ template = node.get("data", {}).get("template", {})
248
+ default_value = template.get("input_value", {}).get("value", "")
249
+
250
+ if node.get("type") == "numberInput":
251
+ inp = gr.Number(label=label, value=float(default_value) if default_value else 0)
252
+ else:
253
+ inp = gr.Textbox(label=label, value=default_value, lines=2)
254
+ inputs.append(inp)
255
+
256
+ # Execute button
257
+ btn = gr.Button("🚀 Execute Workflow", variant="primary")
258
+
259
+ # Create outputs
260
+ outputs = []
261
+ if output_nodes:
262
+ gr.Markdown("### 📤 Outputs")
263
+ for node in output_nodes:
264
+ label = node.get("data", {}).get("label", node.get("id"))
265
+ out = gr.Textbox(label=label, interactive=False, lines=3)
266
+ outputs.append(out)
267
+
268
+ # Connect
269
+ btn.click(fn=execute_workflow, inputs=inputs, outputs=outputs)
270
+
271
+ gr.Markdown("---")
272
+ gr.Markdown("*Powered by MOUSE Workflow*")
273
+
274
+ if __name__ == "__main__":
275
+ demo.launch()