File size: 1,866 Bytes
8aca0aa
 
 
182730a
8aca0aa
 
 
 
 
 
 
 
 
 
 
8f83cc7
 
 
 
182730a
c5328eb
03fdc7b
6be09f7
5e1f687
8f83cc7
c5328eb
a4f8808
c5328eb
cb39e31
541525b
3c461e3
a4f8808
 
156f863
5089ef9
 
a4f8808
 
6be09f7
529f20b
 
 
182730a
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
import os
API_TOKEN = os.getenv('API_TOKEN')

import gradio as gr
import requests

API_URL = "https://api-inference.huggingface.co/models/tlkh/flan-t5-paraphrase-classify-explain"
headers = {"Authorization": f"Bearer {API_TOKEN}"}

def query(payload):
    """POST *payload* to the HF Inference API endpoint and return the decoded JSON.

    Parameters:
        payload: JSON-serializable request body (keys "inputs" / "parameters").

    Returns:
        The decoded JSON response — a dict like {"error": ...} on failure,
        or a list like [{"generated_text": ...}] on success.

    Raises:
        requests.RequestException: on network failure or timeout.
    """
    # Defect fixed: the original call had no timeout, so a stalled
    # connection would block the Gradio worker indefinitely.
    response = requests.post(API_URL, headers=headers, json=payload, timeout=60)
    return response.json()

def infer(s1, s2):
    """Classify and explain the relationship between a pair of sentences.

    Parameters:
        s1: first sentence (plain text).
        s2: second sentence (plain text).

    Returns:
        A two-element list [label, explanation] of strings, matching the
        two Markdown outputs declared on the Gradio Interface.
    """
    model_input = "Classify and explain the relationship between this pair of sentences: <S1> "+s1+" </S1><S2> "+s2+" </S2>"
    data = query({
        "inputs": model_input,
        "parameters": {"max_length": 128},
    })

    # The API returns a dict {"error": ...} on failure (e.g. model still
    # loading) and a list [{"generated_text": ...}] on success.
    # Defect fixed: the original indexed data[0] whenever "error" was absent,
    # raising KeyError/TypeError on an unexpected dict response.
    if isinstance(data, dict) and "error" in data:
        return ["Error", str(data["error"])+" You may try again in about 1 minute."]
    if isinstance(data, list) and data and "generated_text" in data[0]:
        # Defect fixed: a response without the " ; " separator produced a
        # 1-element list while the Interface declares two outputs, crashing
        # Gradio; an explanation containing " ; " produced more than two.
        # maxsplit=1 plus padding guarantees exactly two elements.
        output = data[0]["generated_text"].split(" ; ", 1)
        if len(output) < 2:
            output.append("")
        return output
    # Unexpected payload shape: surface it in both output slots, as strings,
    # consistent with the other branches (the original returned a raw tuple).
    return [str(data), str(data)]

# --- Gradio UI wiring (script level) ---

# Page title and the long description rendered below the interface.
title = "Paraphrase Classification and Explanation"
long_desc = "This is a Flan-T5-Large model fine-tuned to perform paraphrase classification and explanation. The model takes in two sentences as inputs, and outputs a classification label and explanation. The model is trained on our Semantic Paraphrase Types dataset. Feel free to modify the example inputs or enter in your own sentences. Due to existing limitations, the explanation generated may not be entirely accurate. We hope that in future work, more powerful models can be trained and produce more accurate explanations."

# Two free-text inputs pre-filled with an example sentence pair.
s1 = gr.Textbox(value="On Monday, Tom went to the market and bought a pig.",label="Sentence 1")
s2 = gr.Textbox(value="Tom went to the market.",label="Sentence 2")

# Two Markdown outputs: infer() must return exactly two strings
# (classification label, explanation) to fill these slots.
label = gr.Markdown(value="Label: ")
explain = gr.Markdown(value="Explanation: ")

# Bind inputs/outputs to infer() and launch the app (blocking call).
demo = gr.Interface(fn=infer, inputs=[s1,s2], outputs=[label,explain],
                    title=title,
                    article=long_desc,
                    )

demo.launch()