import os
from typing import Iterator

import streamlit as st
from huggingface_hub import InferenceClient
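
# Streamlit chat front-end for the DeepSeek-R1 model, served through the
# Hugging Face InferenceClient with the Together provider.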

API_KEY = os.getenv("TOGETHER_API_KEY")
if not API_KEY:
    raise ValueError("API key is missing! Make sure TOGETHER_API_KEY is set in the Secrets.")
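# On Streamlit Cloud or Hugging Face Spaces, add TOGETHER_API_KEY to the
# app's Secrets; for a local run, export it in the shell (placeholder value):
#   export TOGETHER_API_KEY="..."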


# Cached so the InferenceClient is constructed once per process rather than
# on every Streamlit rerun.
@st.cache_resource
def get_client():
    return InferenceClient(
        provider="together",
        api_key=API_KEY,
    )


def process_file(file) -> str:
    """Return an uploaded file's text content (assumes UTF-8)."""
    if file is None:
        return ""
    try:
        return file.getvalue().decode("utf-8")
    except Exception as e:
        return f"Error reading file: {e}"


def generate_response(
    message: str,
    history: list[tuple[str, str]],
    system_message: str,
    max_tokens: int,
    temperature: float,
    top_p: float,
    files=None,
) -> Iterator[str]:
    """Stream the model's reply, yielding text chunks as they arrive."""
    client = get_client()

    # Prepend the contents of any uploaded files to the user's message.
    all_content = ""
    if files:
        file_contents = [process_file(file) for file in files]
        all_content = "\n\n".join(
            f"File {i + 1} content:\n{content}"
            for i, content in enumerate(file_contents)
        )

    if all_content:
        message = f"{all_content}\n\nUser message:\n{message}"

    # Rebuild the conversation for the API: system prompt first, then the
    # alternating user/assistant turns, then the new message.
    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    try:
        stream = client.chat.completions.create(
            model="deepseek-ai/DeepSeek-R1",
            messages=messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        )
        for chunk in stream:
            if chunk.choices and chunk.choices[0].delta and chunk.choices[0].delta.content:
                yield chunk.choices[0].delta.content
    except Exception as e:
        yield f"Error: {e}"


def main():
    st.set_page_config(page_title="DeepSeek Chat", page_icon="π", layout="wide")

    if "messages" not in st.session_state:
        st.session_state.messages = []

    st.title("DeepSeek Chat with File Upload")
    st.markdown("Chat with the DeepSeek AI model. You can optionally upload files for the model to analyze.")

    with st.sidebar:
        st.header("Settings")
        system_message = st.text_area(
            "System Message",
            value="You are a friendly Chatbot.",
            height=100,
        )
        max_tokens = st.slider(
            "Max Tokens",
            min_value=1,
            max_value=8192,
            value=8192,
            step=1,
        )
        # The default must lie within [min_value, max_value]; 0.7 is a
        # moderate starting temperature.
        temperature = st.slider(
            "Temperature",
            min_value=0.1,
            max_value=4.0,
            value=0.7,
            step=0.1,
        )
        top_p = st.slider(
            "Top-p (nucleus sampling)",
            min_value=0.1,
            max_value=1.0,
            value=0.95,
            step=0.05,
        )
        # Returns a list because accept_multiple_files=True.
        uploaded_files = st.file_uploader(
            "Upload File (optional)",
            type=['txt', 'py', 'md', 'swift', 'java', 'js', 'ts', 'rb', 'go',
                  'php', 'c', 'cpp', 'h', 'hpp', 'cs', 'html', 'css', 'kt'],
            accept_multiple_files=True,
        )

    # Replay the conversation so far.
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.write(message["content"])

    if prompt := st.chat_input("What would you like to know?"):
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.write(prompt)

        with st.chat_message("assistant"):
            response_placeholder = st.empty()
            full_response = ""

            # Pair stored messages into (user, assistant) turns; zip() drops
            # the just-appended prompt, which has no reply yet and is passed
            # separately below.
            history = [(msg["content"], next_msg["content"])
                       for msg, next_msg in zip(st.session_state.messages[::2],
                                                st.session_state.messages[1::2])]

            for response_chunk in generate_response(
                prompt,
                history,
                system_message,
                max_tokens,
                temperature,
                top_p,
                uploaded_files,
            ):
                full_response += response_chunk
                # Show the partial response with a cursor while streaming.
                response_placeholder.markdown(full_response + "▌")

            response_placeholder.markdown(full_response)

        st.session_state.messages.append({"role": "assistant", "content": full_response})


if __name__ == "__main__":
    main()
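
# To run locally (assuming this file is saved as app.py):
#   streamlit run app.py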