import gradio as gr
import numpy as np

# Load a demo Interface for the openai-gpt model from the Hugging Face Hub.
# Note that the returned Interface is not assigned, so it is not used by the
# Interface built below.
gr.Interface.load("models/openai-gpt")
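# ---------------------------------------------------------------------------
# Assumed setup (a minimal sketch, not part of the original snippet):
# predict() below expects `labels`, `model`, and `BertSemanticDataGenerator`
# to already exist, e.g. from the training script for the fine-tuned
# sentence-pair classifier. One plausible wiring is sketched here; the saved
# model path "semantic_model" and the label order are assumptions for
# illustration only:
#
#   from tensorflow import keras
#
#   labels = ["contradiction", "entailment", "neutral"]
#   model = keras.models.load_model("semantic_model")
#   # BertSemanticDataGenerator is the keras.utils.Sequence subclass used at
#   # training time to turn sentence pairs into tokenized BERT inputs.
# ---------------------------------------------------------------------------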
def predict(sentence1, sentence2):
    # Wrap the two sentences as a single batch for the data generator.
    sentence_pairs = np.array([[str(sentence1), str(sentence2)]])
    test_data = BertSemanticDataGenerator(
        sentence_pairs, labels=None, batch_size=1, shuffle=False, include_targets=False,
    )
    # Predict class probabilities for the pair and map each label to its score.
    probs = model.predict(test_data[0])[0]
    labels_probs = {label: float(probs[i]) for i, label in enumerate(labels)}
    return labels_probs


examples = [
    ["Two women are observing something together.", "Two women are standing with their eyes closed."],
    ["A smiling costumed woman is holding an umbrella", "A happy woman in a fairy costume holds an umbrella"],
    ["A soccer game with multiple males playing", "Some men are playing a sport"],
]

gr.Interface(
    fn=predict,
    title="basic with GPT",
    description="Natural Language Inference by fine-tuning a GPT model",
    inputs=["text", "text"],
    examples=examples,
    outputs=gr.outputs.Label(num_top_classes=3, label="Semantic similarity"),
    cache_examples=True,
).launch(debug=True, enable_queue=True)