import spaces
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
import gradio as gr
from threading import Thread
import os
from gradio_modal import Modal

# Load the model and tokenizer once at startup
checkpoint = "WillHeld/soft-raccoon"
device = "cuda"
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForCausalLM.from_pretrained(checkpoint).to(device)


@spaces.GPU(duration=120)
def predict(message, history, temperature, top_p):
    # Append the new user message and build the prompt with the chat template
    history.append({"role": "user", "content": message})
    input_text = tokenizer.apply_chat_template(history, tokenize=False, add_generation_prompt=True)
    inputs = tokenizer.encode(input_text, return_tensors="pt").to(device)

    # Create a streamer so tokens can be yielded as they are generated
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)

    # Set up generation parameters
    generation_kwargs = {
        "input_ids": inputs,
        "max_new_tokens": 1024,
        "temperature": float(temperature),
        "top_p": float(top_p),
        "do_sample": True,
        "streamer": streamer,
        "eos_token_id": 128009,
    }

    # Run generation in a separate thread so the streamer can be consumed here
    thread = Thread(target=model.generate, kwargs=generation_kwargs)
    thread.start()

    # Yield the accumulated text as tokens arrive
    partial_text = ""
    for new_text in streamer:
        partial_text += new_text
        yield partial_text


# Function to handle the report submission
def submit_report(satisfaction, feedback_text):
    # In a real application, you might save this to a database or file
    print(f"Report submitted - Satisfaction: {satisfaction}, Feedback: {feedback_text}")
    return "Thank you for your feedback! Your report has been submitted."


with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column(scale=3):
            chatbot = gr.ChatInterface(
                predict,
                additional_inputs=[
                    gr.Slider(0.1, 2.0, value=0.7, step=0.1, label="Temperature"),
                    gr.Slider(0.1, 1.0, value=0.9, step=0.05, label="Top-P"),
                ],
                type="messages",
            )
        with gr.Column(scale=1):
            report_button = gr.Button("File a Report", variant="primary")

    # Create the modal with the feedback form components
    with Modal(visible=False) as feedback_modal:
        with gr.Column():
            gr.Markdown("## We value your feedback!")
            gr.Markdown("Please tell us about your experience with the model.")

            satisfaction = gr.Radio(
                ["Very satisfied", "Satisfied", "Neutral", "Unsatisfied", "Very unsatisfied"],
                label="How satisfied are you with the model's responses?",
                value="Neutral",
            )

            feedback_text = gr.Textbox(
                lines=5,
                label="Please provide any additional feedback or describe issues you encountered:",
                placeholder="Enter your detailed feedback here...",
            )

            submit_button = gr.Button("Submit Feedback", variant="primary")
            response_text = gr.Textbox(label="Status", interactive=False)

    # Connect the "File a Report" button to show the modal
    report_button.click(
        lambda: Modal(visible=True),
        None,
        feedback_modal,
    )

    # Connect the submit button to the submit_report function
    submit_button.click(
        submit_report,
        inputs=[satisfaction, feedback_text],
        outputs=response_text,
    )

demo.launch()