Spaces:
Sleeping
Sleeping
File size: 1,203 Bytes
0ee1d88 a01cd5c 0ee1d88 a01cd5c b51cd11 1939e34 a01cd5c b51cd11 a01cd5c 0ee1d88 1939e34 0ee1d88 b51cd11 0ee1d88 b51cd11 69e6331 0ee1d88 b51cd11 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 |
import os

import gradio as gr
import requests

# SECURITY: the Hugging Face token was previously hard-coded (and duplicated)
# here. Prefer supplying it via the HF_API_TOKEN environment variable; the
# literal fallback preserves existing behavior, but that token is exposed and
# should be revoked and removed.
HF_API_TOKEN = os.environ.get("HF_API_TOKEN", "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")

# Text-generation model used by `query` to guess a context for a question.
API_URL = "https://api-inference.huggingface.co/models/tiiuae/falcon-7b-instruct"
headers = {"Authorization": f"Bearer {HF_API_TOKEN}"}

# Extractive question-answering model used by `query2`.
API_URL2 = "https://api-inference.huggingface.co/models/valhalla/longformer-base-4096-finetuned-squadv1"
headers2 = {"Authorization": f"Bearer {HF_API_TOKEN}"}
def query(question):
    """Ask the Falcon instruct model to generate a context for *question*.

    Parameters:
        question: the user's question, interpolated into a fixed prompt.

    Returns:
        The model's generated text as a string.

    Raises:
        requests.HTTPError: on a non-2xx response from the Inference API.
    """
    # Text-generation endpoints expect {"inputs": <prompt>}; the previous
    # {"question": ...} payload is the QA-pipeline format and is rejected.
    prompt = "what is the context of " + question + " : "
    payload = {"inputs": prompt}
    response = requests.post(API_URL, headers=headers, json=payload, timeout=60)
    response.raise_for_status()
    # Text-generation responses are a list: [{"generated_text": "..."}], so
    # indexing ["answer"] on the raw JSON could never have worked.
    return response.json()[0]["generated_text"]
def query2(question, context):
    """Answer *question* from *context* with the longformer SQuAD model.

    Parameters:
        question: the question to answer.
        context: the passage the answer should be extracted from.

    Returns:
        The extracted answer string from the model's JSON response.

    Raises:
        requests.HTTPError: on a non-2xx response from the Inference API.
    """
    # The Inference API requires QA pipeline arguments to be nested under
    # "inputs"; a flat {"question", "context"} payload is rejected.
    payload = {"inputs": {"question": question, "context": context}}
    response = requests.post(API_URL2, headers=headers2, json=payload, timeout=60)
    response.raise_for_status()
    return response.json()["answer"]
# QA interface: takes a question and a context, shows the extracted answer.
iface = gr.Interface(
    fn=query2,
    # gr.Textbox's first positional argument is the initial *value*, not the
    # label — the original pre-filled the boxes with the literal strings
    # "question"/"context". Use label= so the UI labels the fields instead.
    inputs=[gr.Textbox(label="question"), gr.Textbox(label="context")],
    outputs=gr.Textbox(label="answer"),
    title="AI Interface 2",
    description="Ask the AI model anything!",
)
# Generation interface: takes a question only, shows the model's free text.
iface2 = gr.Interface(
    fn=query,
    # label= (not the positional value argument) so the box is labeled
    # "question" rather than pre-filled with that text.
    inputs=[gr.Textbox(label="question")],
    outputs=gr.Textbox(label="answer"),
    title="AI Interface",
    description="Ask the AI model anything!",
)
# `gr.launch_in_debugger` does not exist in the Gradio API and would raise
# AttributeError at startup. Present both interfaces as tabs and launch the
# app the standard way.
gr.TabbedInterface([iface, iface2], ["AI Interface 2", "AI Interface"]).launch()
|