import os

import gradio as gr
import requests

# Read the Hugging Face access token from the environment and point at the
# hosted Inference API endpoint for the paraphrase model.
API_TOKEN = os.getenv("API_TOKEN")
API_URL = "https://api-inference.huggingface.co/models/tlkh/flan-t5-paraphrase-classify-explain"
headers = {"Authorization": f"Bearer {API_TOKEN}"}

def query(payload):
    # POST the payload to the Inference API and return the parsed JSON response.
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()

def infer(s1, s2):
    # Wrap the sentence pair in the tags the fine-tuned model expects.
    model_input = f"Classify and explain the relationship between this pair of sentences: <S1> {s1} </S1><S2> {s2} </S2>"
    data = query({
        "inputs": model_input,
        "parameters": {"max_length": 128},
    })

    # On failure the API returns a dict with an "error" key; on success it
    # returns a list of generations.
    if isinstance(data, dict) and "error" in data:
        return "Error: " + data["error"]
    elif isinstance(data, list) and data and "generated_text" in data[0]:
        # The model emits the label and the explanation separated by " ; ".
        return data[0]["generated_text"].replace(" ; ", "\n")
    else:
        return str(data)

title = "Paraphrase Classification and Explanation"
desc = "Classify and explain the semantic relationship between the two sentences"
long_desc = "This is a Flan-T5-Large model fine-tuned to perform paraphrase classification and explanation. It takes two sentences as input. Feel free to modify the example inputs or enter your own sentences."

s1 = gr.Textbox(value="On Monday, Tom went to the market.", label="Sentence 1")
s2 = gr.Textbox(value="Tom went to the market.", label="Sentence 2")

demo = gr.Interface(fn=infer, inputs=[s1,s2], outputs="text",
                    title=title,
                    description=desc,
                    article=long_desc,
                    )

demo.launch()
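
# Sketch of the round trip this app performs (hypothetical values, assuming the
# hosted model is reachable and API_TOKEN is set):
#
#   infer("On Monday, Tom went to the market.", "Tom went to the market.")
#
# sends {"inputs": "Classify and explain the relationship between this pair of
# sentences: <S1> ... </S1><S2> ... </S2>", "parameters": {"max_length": 128}}
# to the Inference API and renders the returned "generated_text", with the
# " ; " separator replaced by newlines so the label and explanation appear on
# separate lines in the Gradio text output.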