import requests
import streamlit as st

# from streamlitui import StreamlitUI
# stui = StreamlitUI(api_url="http://localhost:8000")  # FastAPI backend URL

st.title("FastAPI ChatBot")

# File upload: send the chosen PDF to the FastAPI /upload endpoint.
st.write("Upload a file to FastAPI")
file = st.file_uploader("Choose a file", type=["pdf"])
if st.button("Submit"):
    if file is not None:
        files = {"file": (file.name, file, file.type)}
        response = requests.post(
            "https://ahmed-eisa-genai-service.hf.space/upload", files=files
        )
        st.write(response.text)
    else:
        st.write("No file uploaded.")

# Chat history lives in session state so it survives Streamlit reruns.
if "messages" not in st.session_state:
    st.session_state.messages = []

# Replay the stored conversation on every rerun.
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

if prompt := st.chat_input("Write your prompt in this input field"):
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.text(prompt)

    # Forward the prompt to the FastAPI text-generation endpoint.
    response = requests.post(
        "https://ahmed-eisa-genai-service.hf.space/generate/text",
        json={"model": "gpt-3.5-turbo", "prompt": prompt, "temperature": 0.7},
    )
    response.raise_for_status()
    with st.chat_message("assistant"):
        st.markdown(response.text)
    # Keep the assistant's reply in history so it reappears after the next rerun.
    st.session_state.messages.append({"role": "assistant", "content": response.text})
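For reference, this frontend assumes the backend exposes two endpoints: /upload, which accepts a multipart file field named "file", and /generate/text, which accepts a JSON body with model, prompt, and temperature fields. The sketch below is only a minimal illustration of those request and response shapes, not the actual service running at ahmed-eisa-genai-service.hf.space; the handler bodies and the TextGenerationRequest model name are assumptions made for the example.

from fastapi import FastAPI, File, UploadFile
from pydantic import BaseModel

app = FastAPI()

class TextGenerationRequest(BaseModel):
    # Mirrors the JSON payload the Streamlit frontend sends.
    model: str
    prompt: str
    temperature: float = 0.7

@app.post("/upload")
async def upload(file: UploadFile = File(...)):
    # A real service would store or index the PDF; here we just echo metadata.
    contents = await file.read()
    return {"filename": file.filename, "size": len(contents)}

@app.post("/generate/text")
async def generate_text(request: TextGenerationRequest):
    # A real service would call a language model; a canned reply keeps the sketch runnable.
    return f"Echo from {request.model}: {request.prompt}"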