peterciank committed
Commit 03b119a · verified · 1 Parent(s): dab1ed6

Update app.py

Files changed (1)
  1. app.py +19 -14
app.py CHANGED
@@ -1,21 +1,26 @@
- import os
+ import streamlit as st
  import requests
- import gradio as gr

- # Define function to query the AI model
+ # Set up Hugging Face API details
+ headers = {"Authorization": f"Bearer {HF_TOKEN}"}
+ API_URL = "https://api-inference.huggingface.co/models/facebook/bart-large-cnn"
+
+ # Function to query the Hugging Face model
  def query(payload):
-     token = os.getenv("HF_TOKEN", None)
-     headers = {"Authorization": f"Bearer {token}"}
-     API_URL = "https://api-inference.huggingface.co/models/facebook/bart-large-cnn"
      response = requests.post(API_URL, headers=headers, json=payload)
      return response.json()

- # Create Gradio interface
- def chat(input_text):
-     data = query({"inputs": input_text, "parameters": {"do_sample": False}})
-     return data[0]['generated_text']
-
- input_text = gr.Textbox(lines=7, label="Input Text")
- output_text = gr.Textbox(label="Model Response")
+ # Streamlit UI
+ st.title("Chat App with Hugging Face")
+ user_input = st.text_input("You:", "")

- gr.Interface(fn=chat, inputs=input_text, outputs=output_text, title="AI Chat", description="Chat with AI model").launch()
+ if st.button("Send"):
+     if user_input.strip() != "":
+         # Query Hugging Face model
+         data = query({"inputs": user_input, "parameters": {"do_sample": False}})
+
+         # Display response
+         if data and "summary_text" in data[0]:
+             st.text_area("Bot:", value=data[0]["summary_text"], height=150)
+         else:
+             st.error("No response from the model")
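Note: the updated file references HF_TOKEN when building the Authorization header but never defines it, so running the script as committed would raise a NameError at import time. A minimal sketch of how the token could be supplied, assuming it is available as the HF_TOKEN environment variable (the approach the removed Gradio version used via os.getenv):

import os

# Assumed token setup, not part of this commit: read HF_TOKEN from the
# environment, mirroring the os.getenv("HF_TOKEN", None) call that the
# earlier Gradio version used before building the request headers.
HF_TOKEN = os.getenv("HF_TOKEN", None)
headers = {"Authorization": f"Bearer {HF_TOKEN}"}
API_URL = "https://api-inference.huggingface.co/models/facebook/bart-large-cnn"

On Hugging Face Spaces, a repository secret named HF_TOKEN is exposed to the app as an environment variable, so the same os.getenv lookup should work there as well.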