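"""Gradio demo for the ASG models.

Loads the ASG Group, Technique and Software models and exposes them in a
tabbed UI: predictAPI output (T2T), Predict_ALL output (T2Seq), and a
chat-style interface (T2Scenario) that calls the Group model.
"""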
import sys

import gradio as gr

# Make the ASG API package importable, then load the models once at startup.
sys.path.insert(0, "ASG.API/")
from ASGModels import ASG

ASGAI = ASG(isForm=False)

# Model options offered in the dropdowns.
choices = [
    "Group",
    "Technique",
    "Software",
]

# Standalone dropdown (label translated from the Arabic "اختر النموذج");
# the variable is redefined inside the Blocks layout below.
model_choices = gr.Dropdown(
    choices=choices,
    label="Choose the model",
    value="Group",
)
def t2t(text, namn_model):
    """Run the selected model's predictAPI on the input text."""
    if namn_model == "Group":
        out = ASGAI.Group.predictAPI(text)
    elif namn_model == "Technique":
        out = ASGAI.Tec.predictAPI(text)
    else:
        out = ASGAI.Soft.predictAPI(text)
    return str(out)
def t2seq(text, namn_model):
    """Run the selected model's Predict_ALL on the input text."""
    if namn_model == "Group":
        out = ASGAI.Group.Predict_ALL(text)
    elif namn_model == "Technique":
        out = ASGAI.Tec.Predict_ALL(text)
    else:
        out = ASGAI.Soft.Predict_ALL(text)
    return str(out)
def echo(message, history):
    """Chat handler: always queries the Group model, ignoring the dropdown."""
    text = t2seq(message, "Group")
    return text
# Build the tabbed UI.
with gr.Blocks() as demo:
    with gr.Row():
        with gr.Tab("Threat Base"):
            gr.Markdown("### Threat Base")
            with gr.Row():
                with gr.Tab("T2T"):
                    text_input = gr.Textbox(label="Input Text")
                    # Any value other than "Group"/"Technique" routes to the Software model.
                    model_choices = gr.Dropdown(choices=choices, label="Model", value="Group")
                    text_output = gr.Textbox(label="Output")
                    submit_btn = gr.Button("Submit")
                    submit_btn.click(fn=t2t, inputs=[text_input, model_choices], outputs=text_output)
with gr.Tab("T2Seq"):
text_input_seq = gr.Textbox(label="Input Text")
model_choices_seq = gr.Dropdown(choices=["Group", "Technique", "Soft"], label="Model",value="Group",)
text_output_seq = gr.Textbox(label="Output")
submit_btn_seq = gr.Button("Submit")
submit_btn_seq.click(fn=t2seq, inputs=[text_input_seq, model_choices_seq], outputs=text_output_seq)
with gr.Tab("T2Sinaro"):
model_choices_seq1 = gr.Dropdown(choices=["Group", "Technique", "Soft"], label="Model",value="Group",)
gr.ChatInterface(fn=echo, examples=["hello", "hola", "merhaba"], title="Echo Bot")
with gr.Tab("Stute Base"):
gr.Markdown("### Stute Base")
demo.launch()
# demo.launch()
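
# Hypothetical usage sketch (not part of the app): once the Space is running,
# its endpoints could be queried with gradio_client. The Space id below is a
# placeholder, and the api_name values assume a Gradio version that names
# click endpoints after their Python functions.
#
# from gradio_client import Client
# client = Client("owner/asg-space")  # placeholder Space id
# print(client.predict("some input text", "Group", api_name="/t2t"))
# print(client.predict("some input text", "Technique", api_name="/t2seq"))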