Update app.py
app.py
CHANGED
@@ -1,5 +1,6 @@
 import gradio as gr
-from ontochat.functions import set_openai_api_key, user_story_generator, cq_generator, load_example_user_story, clustering_generator, ontology_testing, load_example
+# from ontochat.functions import set_openai_api_key, user_story_generator, cq_generator, load_example_user_story, clustering_generator, ontology_testing, load_example
+from ontochat.functions import set_openai_api_key, user_story_generator, load_example
 
 user_story_template = """**Persona:**\n\n- Name: -\n- Age: -\n- Occupation: -\n- Skills: -\n- Interests: -\n\n**Goal:**\n\n- Description: -\n- Keywords: -\n\n**Scenario:**\n\n- Before: -\n- During: -\n- After: -\n\n**Example Data:**\n\n- Category: -\n- Data: -\n\n**Resources:**\n\n- Resource Name: -\n- Link: -"""
 
@@ -8,17 +9,14 @@ with gr.Blocks() as set_api_key:
         """
         # Welcome to OntoChat! 👋
 
-        - **Test and verify your ontology's design** without needing to write queries manually.
-        Let's work together to simplify your ontology engineering process!
-        Visit [OntoChat on GitHub](https://github.com/King-s-Knowledge-Graph-Lab/OntoChat) for more information.
+        **Hi there! I'm OntoChat, your conversational assistant for collecting ontology user stories.** 👋
+
+        I simplify the process of creating ontology user stories by combining human input with GenAI capabilities. Whether you have experience with prompt engineering or not, I'm here to guide you. By providing predefined templates, focused questions, example answers, and refinement options, I help you generate clear, high-quality user stories.
+
+        Let's make ontology requirements collection easier and more efficient together!
+
+        For more details, visit [OntoChat on GitHub](https://github.com/King-s-Knowledge-Graph-Lab/OntoChat).
 
         """
     )
 
@@ -78,129 +76,130 @@ with gr.Blocks() as user_story_interface:
         outputs=[user_story_input]
     )
 
-with gr.Blocks() as cq_interface:
-clustering_interface = gr.Interface(
-with gr.Blocks() as testing_interface:
+# with gr.Blocks() as cq_interface:
+# with gr.Row():
+# with gr.Column():
+# cq_chatbot = gr.Chatbot(
+# value=[
+# {
+# "role": "assistant",
+# "content": (
+# "I am OntoChat, your conversational ontology engineering assistant. Here is the second step of "
+# "the system. Please give me your user story and tell me how many competency questions you want "
+# "me to generate from the user story."
+# )
+# }
+# ],
+# type="messages"
+# )
+# cq_input = gr.Textbox(
+# label="Chatbot input",
+# placeholder="Please type your message here and press Enter to interact with the chatbot:"
+# )
+# gr.Markdown(
+# """
+# ### User story examples
+# Click the button below to use an example user story from
+# [Linka](https://github.com/polifonia-project/stories/tree/main/Linka_Computer_Scientist) in Polifonia.
+# """
+# )
+# example_btn = gr.Button(value="Use example user story")
+# example_btn.click(
+# fn=load_example_user_story,
+# inputs=[],
+# outputs=[cq_input]
+# )
+# cq_output = gr.TextArea(
+# label="Competency questions",
+# interactive=True
+# )
+# cq_input.submit(
+# fn=cq_generator,
+# inputs=[
+# cq_input, cq_chatbot
+# ],
+# outputs=[
+# cq_output, cq_chatbot, cq_input
+# ]
+# )
+
+# clustering_interface = gr.Interface(
+# fn=clustering_generator,
+# inputs=[
+# gr.TextArea(
+# label="Competency questions",
+# info="Please copy the previously generated competency questions and paste it here. You can also modify "
+# "the questions before submitting them."
+# ),
+# gr.Dropdown(
+# value="LLM clustering",
+# choices=["LLM clustering", "Agglomerative clustering"],
+# label="Clustering method",
+# info="Please select the clustering method."
+# ),
+# gr.Textbox(
+# label="Number of clusters (optional for LLM clustering)",
+# info="Please input the number of clusters you want to generate. And please do not input a number that "
+# "exceeds the total number of competency questions."
+# )
+# ],
+# outputs=[
+# gr.Image(label="Visualization"),
+# gr.Code(
+# language='json',
+# label="Competency Question clusters"
+# )
+# ],
+# title="OntoChat",
+# description="This is the third step of OntoChat. Please copy the generated competency questions from the previous "
+# "step and run the clustering algorithm to group the competency questions based on their topics. From "
+# "our experience, LLM clustering has the best performance.",
+# flagging_mode="never"
+# )
+
+# with gr.Blocks() as testing_interface:
+# gr.Markdown(
+# """
+# # OntoChat
+# This is the final part of OntoChat which performs ontology testing based on the input ontology file and CQs.
+# """
+# )
+
+# with gr.Group():
+# api_key = gr.Textbox(
+# label="OpenAI API Key",
+# placeholder="If you have set the key in other tabs, you don't have to set it again.",
+# info="Please input your OpenAI API Key if you don't have it set up on your own machine. Please note that "
+# "the key will only be used for this demo and will not be uploaded or used anywhere else."
+# )
+# api_key_btn = gr.Button(value="Set API Key")
+# api_key_btn.click(fn=set_openai_api_key, inputs=api_key, outputs=api_key)
+
+# ontology_file = gr.File(label="Ontology file")
+# ontology_desc = gr.Textbox(
+# label="Ontology description",
+# placeholder="Please provide a description of the ontology uploaded to provide basic information and "
+# "additional context."
+# )
+# cq_testing_input = gr.Textbox(
+# label="Competency questions",
+# placeholder="Please provide the competency questions that you want to test with."
+# )
+# testing_btn = gr.Button(value="Test")
+# testing_output = gr.TextArea(label="Ontology testing output")
+# testing_btn.click(
+# fn=ontology_testing,
+# inputs=[
+# ontology_file, ontology_desc, cq_testing_input
+# ],
+# outputs=[
+# testing_output
+# ]
+# )
 
 demo = gr.TabbedInterface(
-    [set_api_key, user_story_interface, cq_interface, clustering_interface, testing_interface],
+    # [set_api_key, user_story_interface, cq_interface, clustering_interface, testing_interface],
+    [set_api_key, user_story_interface],
     ["Set API Key", "User Story Generation", "Competency Question Extraction", "Competency Question Analysis", "Ontology Testing"]
 )
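A side note on the retained tabs (not part of the commit): the new interface list passes only two Blocks to gr.TabbedInterface, while the tab-name list still carries all five labels. If the intent is to expose just the two remaining tabs, the label list would presumably be trimmed to the same length. A minimal sketch of that assumption, using stand-in Blocks in place of the ones defined earlier in app.py:

import gradio as gr

# Stand-ins for the two interfaces this commit keeps; in app.py these are the
# Blocks built earlier in the file.
with gr.Blocks() as set_api_key:
    gr.Markdown("# Welcome to OntoChat!")

with gr.Blocks() as user_story_interface:
    gr.Markdown("User story generation goes here.")

# Assumption: with only two interfaces kept, the tab-label list is trimmed to
# the same length so each tab has a matching name.
demo = gr.TabbedInterface(
    [set_api_key, user_story_interface],
    ["Set API Key", "User Story Generation"],
)

if __name__ == "__main__":
    demo.launch()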