Spaces:
Sleeping
Sleeping
File size: 1,345 Bytes
8aca0aa 182730a 8aca0aa b912888 182730a c5328eb 6be09f7 cb39e31 c5328eb cb39e31 3c461e3 6be09f7 182730a 6be09f7 33baa80 6be09f7 182730a |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 |
# Module setup: read the Hugging Face API token from the environment (never
# hard-coded) and configure the Inference API endpoint + auth header.
import os
API_TOKEN = os.getenv('API_TOKEN')  # Hugging Face API token; None if the env var is unset
import gradio as gr
import requests
# Hosted Inference API endpoint for the fine-tuned Flan-T5 paraphrase model.
API_URL = "https://api-inference.huggingface.co/models/tlkh/flan-t5-paraphrase-classify-explain"
headers = {"Authorization": f"Bearer {API_TOKEN}"}  # bearer auth sent with every request
def query(payload):
    """POST ``payload`` as the JSON body to the Hugging Face Inference API.

    Parameters:
        payload: JSON-serializable request body (here, the prompt string).

    Returns:
        The decoded JSON response — a dict like ``{"error": ...}`` on
        failure, or a list of generation dicts on success.
    """
    # A timeout stops the Gradio worker from hanging forever if the API
    # stalls; 120 s allows for slow cold-starts of the hosted model.
    response = requests.post(API_URL, headers=headers, json=payload, timeout=120)
    return response.json()
def infer(s1, s2):
    """Classify and explain the semantic relationship between two sentences.

    Parameters:
        s1: first sentence.
        s2: second sentence.

    Returns:
        The model's generated explanation string, an ``"Error: ..."``
        message if the API reported an error, or the raw response as a
        fallback when its shape is unexpected.
    """
    # The model was fine-tuned on prompts using <S1>/<S2> sentinel tags.
    model_input = "Classify and explain the relationship between this pair of sentences: <S1> "+s1+" </S1><S2> "+s2+" </S2>"
    data = query(model_input)
    # The API returns {"error": "..."} on failure and
    # [{"generated_text": "..."}] on success.
    if "error" in data:
        return "Error: "+ data["error"]
    elif "generated_text" in data[0]:
        # Bug fix: the original returned data["generated_text"], which
        # raises TypeError on the list response — the text is in data[0].
        return data[0]["generated_text"]
    else:
        return data
# UI copy and a sample input pair for the Gradio interface defined below.
title = "Paraphrase Classification and Explanation"
desc = "Classify and explain the semantic relationship between the two sentences"
long_desc = "This is a Flan-T5-Large model fine-tuned to perform paraphrase classification and explanation. It takes in two sentences as inputs."
example1 = ["On Monday, Tom went to the market.","Tom went to the market on Monday."]  # one example row: [s1, s2]
# Build and launch the Gradio UI.
# Bug fix: gr.Interface's `examples` parameter expects a plain list of input
# rows (list of lists); the original passed a gr.Examples component, which
# gr.Interface does not accept there and breaks example rendering.
demo = gr.Interface(fn=infer, inputs=["text", "text"], outputs="text",
                    examples=[example1],
                    title=title,
                    description=desc,
                    article=long_desc)
demo.launch()
|