openfree committed on
Commit a8b7f3d · verified · 1 Parent(s): df7ca6d

Delete process_flow_generator.py

Files changed (1)
  1. process_flow_generator.py +0 -181
process_flow_generator.py DELETED
@@ -1,181 +0,0 @@
- import graphviz
- import json
- from tempfile import NamedTemporaryFile
- import os
-
- def generate_process_flow_diagram(json_input: str, output_format: str) -> str:
-     """
-     Generates a Process Flow Diagram (Flowchart) from JSON input.
-
-     Args:
-         json_input (str): A JSON string describing the process flow structure.
-             It must follow the Expected JSON Format Example below.
-
-         output_format (str): The output format for the generated diagram.
-             Supported formats: "png" or "svg"
-
-     Expected JSON Format Example:
-     {
-         "start_node": "Start Inference Request",
-         "nodes": [
-             {
-                 "id": "user_input",
-                 "label": "Receive User Input (Data)",
-                 "type": "io"
-             },
-             {
-                 "id": "preprocess_data",
-                 "label": "Preprocess Data",
-                 "type": "process"
-             },
-             {
-                 "id": "validate_data",
-                 "label": "Validate Data Format/Type",
-                 "type": "decision"
-             },
-             {
-                 "id": "data_valid_yes",
-                 "label": "Data Valid?",
-                 "type": "decision"
-             },
-             {
-                 "id": "load_model",
-                 "label": "Load AI Model (if not cached)",
-                 "type": "process"
-             },
-             {
-                 "id": "run_inference",
-                 "label": "Run AI Model Inference",
-                 "type": "process"
-             },
-             {
-                 "id": "postprocess_output",
-                 "label": "Postprocess Model Output",
-                 "type": "process"
-             },
-             {
-                 "id": "send_response",
-                 "label": "Send Response to User",
-                 "type": "io"
-             },
-             {
-                 "id": "log_error",
-                 "label": "Log Error & Notify User",
-                 "type": "process"
-             },
-             {
-                 "id": "end_inference_process",
-                 "label": "End Inference Process",
-                 "type": "end"
-             }
-         ],
-         "connections": [
-             {"from": "start_node", "to": "user_input", "label": "Request"},
-             {"from": "user_input", "to": "preprocess_data", "label": "Data Received"},
-             {"from": "preprocess_data", "to": "validate_data", "label": "Cleaned"},
-             {"from": "validate_data", "to": "data_valid_yes", "label": "Check"},
-             {"from": "data_valid_yes", "to": "load_model", "label": "Yes"},
-             {"from": "data_valid_yes", "to": "log_error", "label": "No"},
-             {"from": "load_model", "to": "run_inference", "label": "Model Ready"},
-             {"from": "run_inference", "to": "postprocess_output", "label": "Output Generated"},
-             {"from": "postprocess_output", "to": "send_response", "label": "Ready"},
-             {"from": "send_response", "to": "end_inference_process", "label": "Response Sent"},
-             {"from": "log_error", "to": "end_inference_process", "label": "Error Handled"}
-         ]
-     }
-
-     Returns:
-         str: The filepath to the generated image file.
-     """
-     try:
-         if not json_input.strip():
-             return "Error: Empty input"
-
-         data = json.loads(json_input)
-
-         if 'start_node' not in data or 'nodes' not in data or 'connections' not in data:
-             raise ValueError("Missing required fields: 'start_node', 'nodes', or 'connections'")
-
-         node_shapes = {
-             "process": "box",
-             "decision": "diamond",
-             "start": "oval",
-             "end": "oval",
-             "io": "parallelogram",
-             "document": "note",
-             "default": "box"
-         }
-
-         node_colors = {
-             "process": "#BEBEBE",
-             "decision": "#FFF9C4",
-             "start": "#A8E6CF",
-             "end": "#FFB3BA",
-             "io": "#B8D4F1",
-             "document": "#F0F8FF",
-             "default": "#BEBEBE"
-         }
-
-         dot = graphviz.Digraph(
-             name='ProcessFlowDiagram',
-             format='png',
-             graph_attr={
-                 'rankdir': 'TB',
-                 'splines': 'ortho',
-                 'bgcolor': 'white',
-                 'pad': '0.5',
-                 'nodesep': '0.6',
-                 'ranksep': '0.8'
-             }
-         )
-
-         all_defined_nodes = {node['id']: node for node in data['nodes']}
-
-         start_node_id = data['start_node']
-         dot.node(
-             start_node_id,
-             start_node_id,
-             shape=node_shapes['start'],
-             style='filled,rounded',
-             fillcolor=node_colors['start'],
-             fontcolor='black',
-             fontsize='14'
-         )
-
-         for node_id, node_info in all_defined_nodes.items():
-             if node_id == start_node_id:
-                 continue
-
-             node_type = node_info.get("type", "default")
-             shape = node_shapes.get(node_type, "box")
-             color = node_colors.get(node_type, node_colors["default"])
-             node_label = node_info['label']
-
-             dot.node(
-                 node_id,
-                 node_label,
-                 shape=shape,
-                 style='filled,rounded',
-                 fillcolor=color,
-                 fontcolor='black',
-                 fontsize='14'
-             )
-
-         for connection in data['connections']:
-             dot.edge(
-                 connection['from'],
-                 connection['to'],
-                 label=connection.get('label', ''),
-                 color='#4a4a4a',
-                 fontcolor='#4a4a4a',
-                 fontsize='10'
-             )
-
-         with NamedTemporaryFile(delete=False, suffix=f'.{output_format}') as tmp:
-             dot.render(tmp.name, format=output_format, cleanup=True)
-             return f"{tmp.name}.{output_format}"
-
-     except json.JSONDecodeError:
-         return "Error: Invalid JSON format"
-     except Exception as e:
-         return f"Error: {str(e)}"