import gradio as gr
from transformers import RobertaTokenizer, T5ForConditionalGeneration

# Load the CodeT5 checkpoint once at import time so every request reuses it.
# NOTE: transformers ships no CodeT5ForConditionalGeneration / CodeT5Tokenizer
# classes — CodeT5 is served by T5ForConditionalGeneration with a
# RobertaTokenizer, and the hub id is "Salesforce/codet5-base" (the original
# string also carried a stray leading space, which breaks the hub lookup).
model = T5ForConditionalGeneration.from_pretrained("Salesforce/codet5-base")
tokenizer = RobertaTokenizer.from_pretrained("Salesforce/codet5-base")
# Define the Gradio interface.
# fn is a lambda so that generate_code (defined further down in this module)
# is resolved lazily at call time, not at interface-construction time.
demo = gr.Interface(
    fn=lambda input_code, upload_file, temperature, max_length: generate_code(
        input_code, upload_file, temperature, max_length
    ),
    inputs=[
        gr.Textbox(label="Input Code/Prompt"),
        gr.File(label="Upload Code File"),
        # gr.Slider takes its initial value via `value=`; `default=` is not a
        # valid Slider argument and raises a TypeError at construction.
        gr.Slider(label="Temperature", minimum=0, maximum=1, value=0.5),
        gr.Slider(label="Max Length", minimum=10, maximum=512, value=256),
    ],
    outputs=[
        gr.Code(label="Generated Code"),
        gr.Textbox(label="Conversation History"),
    ],
    title="CodeT5 Code Generation Builder",
    description="Generate code snippets using CodeT5 and interact with the AI model through a simple web interface.",
)
def generate_code(input_code, upload_file, temperature, max_length):
    """Generate a code snippet with CodeT5 from a prompt or an uploaded file.

    Args:
        input_code: Free-text prompt/code typed into the textbox.
        upload_file: Optional Gradio file object; when provided, its contents
            replace ``input_code`` as the prompt.
        temperature: Sampling temperature from the slider (0-1 range).
        max_length: Maximum generation length; Gradio sliders deliver floats.

    Returns:
        Tuple of ``(generated_code, conversation_history)`` strings — one for
        each output component of the interface.
    """
    # An uploaded file takes precedence over the typed prompt.
    if upload_file is not None:
        with open(upload_file.name, 'r') as file:
            input_code = file.read()
    # Tokenize the (possibly replaced) prompt.
    input_ids = tokenizer.encode(input_code, return_tensors='pt')
    # do_sample=True is required for temperature to have any effect: without
    # it generate() decodes greedily and silently ignores the temperature.
    # max_length must be an int — the slider hands us a float.
    output = model.generate(
        input_ids,
        do_sample=True,
        temperature=float(temperature),
        max_length=int(max_length),
    )
    generated_code = tokenizer.decode(output[0], skip_special_tokens=True)
    # Simple one-turn history string for the second output component.
    conversation_history = f"Input Code: {input_code}\nGenerated Code: {generated_code}"
    return generated_code, conversation_history
# Launch the Gradio interface (starts the local web server and blocks
# until it is shut down).
demo.launch()