import os

import gradio as gr
import requests

# Hugging Face Inference API endpoint for the fine-tuned Flan-T5 paraphrase model
API_URL = "https://api-inference.huggingface.co/models/tlkh/flan-t5-paraphrase-classify-explain"
API_TOKEN = os.getenv("API_TOKEN")
headers = {"Authorization": f"Bearer {API_TOKEN}"}

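# Note (assumed from the standard Hugging Face Inference API behaviour): the API
# expects a JSON payload of the form {"inputs": "<prompt>"}; text2text-generation
# models typically return a list like [{"generated_text": "..."}], while errors
# (e.g. the model still loading) come back as a dict {"error": "..."}.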
def query(payload):
    # POST the prompt to the Inference API and decode the JSON response
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()


def infer(s1, s2):
    # Wrap the sentence pair in the <S1>/<S2> tags the model was fine-tuned on
    model_input = (
        "Classify and explain the relationship between this pair of sentences: "
        f"<S1> {s1} </S1><S2> {s2} </S2>"
    )
    data = query({"inputs": model_input})

    if isinstance(data, dict) and "error" in data:
        return "Error: " + data["error"]
    elif isinstance(data, list) and "generated_text" in data[0]:
        return data[0]["generated_text"]
    else:
        # Fall back to showing the raw response for unexpected payloads
        return str(data)

title = "Paraphrase Classification and Explanation"
desc = "Classify and explain the semantic relationship between the two sentences"
long_desc = "This is a Flan-T5-Large model fine-tuned to perform paraphrase classification and explanation. It takes in two sentences as inputs."

example1 = ["On Monday, Tom went to the market.","Tom went to the market on Monday."]

s1 = gr.Textbox(value="One",label="Sentence 1")
s2 = gr.Textbox(value="Two",label="Sentence 2")

# gr.Interface takes examples as a list of [sentence 1, sentence 2] pairs
examples = [example1,
            ["prompt_2", "prompt_2"],
            ["prompt_3", "prompt_3"]]

demo = gr.Interface(fn=infer, inputs=[s1, s2], outputs="text",
                    examples=examples,
                    title=title,
                    description=desc,
                    article=long_desc)
demo.launch()