"""Gradio web UI for code generation with Salesforce CodeT5.

Loads the `Salesforce/codet5-base` checkpoint and exposes a simple
Interface: a text prompt (or an uploaded source file), plus sampling
controls, producing generated code and a one-turn conversation log.
"""

import gradio as gr
# NOTE: there is no CodeT5ForConditionalGeneration/CodeT5Tokenizer in
# transformers; the CodeT5 model card documents these two classes.
from transformers import RobertaTokenizer, T5ForConditionalGeneration

# Initialize the CodeT5 model and tokenizer once at module load.
# (Original used the nonexistent id " Salesforce/code-t5-base" — leading
# space and wrong name; the real hub id is "Salesforce/codet5-base".)
MODEL_NAME = "Salesforce/codet5-base"
model = T5ForConditionalGeneration.from_pretrained(MODEL_NAME)
tokenizer = RobertaTokenizer.from_pretrained(MODEL_NAME)


def generate_code(input_code, upload_file, temperature, max_length):
    """Generate code from a prompt (or uploaded file) with CodeT5.

    Args:
        input_code: Prompt text typed into the textbox.
        upload_file: Optional uploaded file; when present, its contents
            replace ``input_code`` as the prompt.
        temperature: Sampling temperature in [0, 1].
        max_length: Maximum number of generated tokens.

    Returns:
        Tuple of (generated code string, conversation-history string).
    """
    # An uploaded file takes precedence over the typed prompt.
    if upload_file is not None:
        with open(upload_file.name, "r", encoding="utf-8") as file:
            input_code = file.read()

    # Tokenize the prompt for the seq2seq model.
    input_ids = tokenizer.encode(input_code, return_tensors="pt")

    # do_sample=True is required for `temperature` to have any effect;
    # without it generate() decodes greedily and ignores the slider.
    output = model.generate(
        input_ids,
        do_sample=True,
        temperature=max(temperature, 1e-3),  # generate() rejects temperature == 0
        max_length=max_length,
    )

    # Decode the first (only) sequence back to text.
    generated_code = tokenizer.decode(output[0], skip_special_tokens=True)

    # Single-turn "history" shown alongside the output.
    conversation_history = (
        f"Input Code: {input_code}\nGenerated Code: {generated_code}"
    )

    return generated_code, conversation_history


# Define the Gradio interface. Pass generate_code directly — the original
# lambda added nothing. Gradio sliders take `value=`, not `default=`.
demo = gr.Interface(
    fn=generate_code,
    inputs=[
        gr.Textbox(label="Input Code/Prompt"),
        gr.File(label="Upload Code File"),
        gr.Slider(label="Temperature", minimum=0, maximum=1, value=0.5),
        gr.Slider(label="Max Length", minimum=10, maximum=512, value=256),
    ],
    outputs=[
        gr.Code(label="Generated Code"),
        gr.Textbox(label="Conversation History"),
    ],
    title="CodeT5 Code Generation Builder",
    description=(
        "Generate code snippets using CodeT5 and interact with the AI "
        "model through a simple web interface."
    ),
)

# Launch only when run as a script, not on import.
if __name__ == "__main__":
    demo.launch()