Ronith55 committed on
Commit
9bfc384
·
verified ·
1 Parent(s): ad69b37

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +20 -19
app.py CHANGED
@@ -1,24 +1,25 @@
1
- import gradio as gr
2
  from transformers import pipeline
3
 
4
- # Load the model pipeline with trust_remote_code=True
5
- pipe = pipeline("image-text-to-text", model="deepseek-ai/deepseek-vl2-small", trust_remote_code=True)
6
 
7
- # Define the Gradio function
8
- def generate_response(message):
9
- messages = [{"role": "user", "content": message}]
10
- result = pipe(messages)
11
- return result[0]["generated_text"] if isinstance(result, list) else result
12
 
13
- # Create the Gradio interface
14
- iface = gr.Interface(
15
- fn=generate_response,
16
- inputs=gr.Textbox(label="Enter your message"),
17
- outputs=gr.Textbox(label="Model Response"),
18
- title="DeepSeek-VL2 Chatbot",
19
- description="Ask anything and get a response from the DeepSeek-VL2-Small model."
20
- )
21
 
22
- # Launch the app
23
- if __name__ == "__main__":
24
- iface.launch()
 
 
 
 
 
 
 
 
 
import streamlit as st
from transformers import pipeline


@st.cache_resource
def _load_pipeline():
    """Load the DeepSeek-VL2 pipeline once per server process.

    Streamlit re-executes the whole script on every widget interaction;
    without st.cache_resource the multi-GB model would be re-loaded on
    every button click. trust_remote_code=True is required because this
    model ships custom code.
    """
    return pipeline(
        "image-text-to-text",
        model="deepseek-ai/deepseek-vl2-small",
        trust_remote_code=True,
    )


st.write("Initializing model...")

try:
    # OSError covers missing / gated / unauthorized Hub repos;
    # ValueError covers bad task or config combinations.
    pipe = _load_pipeline()
except (ValueError, OSError) as e:
    st.error(f"Model loading failed: {e}")
    st.stop()

st.title("DeepSeek-VL2 Chatbot")

# User input
user_input = st.text_input("Enter your message:", "")

if st.button("Generate Response"):
    if user_input:
        messages = [{"role": "user", "content": user_input}]
        result = pipe(messages)
        # Pipelines typically return a list of dicts with "generated_text";
        # fall back to the raw value otherwise.
        response = result[0]["generated_text"] if isinstance(result, list) else result
        st.text_area("Model Response:", response, height=200)
    else:
        st.warning("Please enter a message.")