import os

import gradio as gr
import requests

# Hugging Face Inference API token is read from the environment.
API_TOKEN = os.getenv("API_TOKEN")
API_URL = "https://api-inference.huggingface.co/models/tlkh/flan-t5-paraphrase-classify-explain"
headers = {"Authorization": f"Bearer {API_TOKEN}"}


def query(payload):
    # Send the request to the hosted Inference API and return the parsed JSON response.
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()


def infer(s1, s2):
    # Build the instruction-style prompt expected by the fine-tuned Flan-T5 model.
    model_input = (
        "Classify and explain the relationship between this pair of sentences: "
        + s1 + " " + s2 + " "
    )
    data = query({
        "inputs": model_input,
        "parameters": {"max_length": 128},
    })
    if "error" in data:
        # The API returned an error dict (e.g. the model is still loading).
        return "Error: " + data["error"]
    elif "generated_text" in data[0]:
        # Successful responses are a list of dicts; split the label and the explanation onto separate lines.
        output = data[0]["generated_text"].replace(" ; ", "\n")
        return output
    else:
        return data


title = "Paraphrase Classification and Explanation"
desc = "Classify and explain the semantic relationship between the two sentences."
long_desc = (
    "This is a Flan-T5-Large model fine-tuned to perform paraphrase classification "
    "and explanation. It takes two sentences as inputs. Feel free to modify the "
    "example inputs or enter your own sentences."
)

s1 = gr.Textbox(value="On Monday, Tom went to the market.", label="Sentence 1")
s2 = gr.Textbox(value="Tom went to the market.", label="Sentence 2")

demo = gr.Interface(
    fn=infer,
    inputs=[s1, s2],
    outputs="text",
    title=title,
    description=desc,
    article=long_desc,
)

demo.launch()
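
# Usage sketch (the filename "app.py" is an assumption): set the token in the
# environment before launching, e.g.
#   API_TOKEN=<your Hugging Face API token> python app.py
# then open the local URL that Gradio prints. The Inference API responds with
# either an error dict such as {"error": "..."} or a list like
# [{"generated_text": "..."}], both of which infer() above handles.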