whackthejacker committed on
Commit 8f0af34 · verified · 1 Parent(s): 7a302bb

Update app.py

Files changed (1)
  1. app.py +62 -4
app.py CHANGED
@@ -1,6 +1,64 @@
 
 
+ # An example of a Gradio interface code generation builder.
+
  import gradio as gr
- gr.load(
-     "models/Salesforce/codet5-base",
-     provider="hf-inference",
- ).launch()
+ from transformers import RobertaTokenizer, T5ForConditionalGeneration
+
+ # Initialize the CodeT5 model and tokenizer
+ model = T5ForConditionalGeneration.from_pretrained("Salesforce/codet5-base")
+ tokenizer = RobertaTokenizer.from_pretrained("Salesforce/codet5-base")
+
+ # Define the Gradio interface (generate_code is defined below; the lambda defers the lookup)
+ demo = gr.Interface(
+     fn=lambda input_code, upload_file, temperature, max_length: generate_code(input_code, upload_file, temperature, max_length),
+     inputs=[
+         gr.Textbox(label="Input Code/Prompt"),
+         gr.File(label="Upload Code File"),
+         gr.Slider(label="Temperature", minimum=0.1, maximum=1, value=0.5),
+         gr.Slider(label="Max Length", minimum=10, maximum=512, value=256),
+     ],
+     outputs=[
+         gr.Code(label="Generated Code"),
+         gr.Textbox(label="Conversation History"),
+     ],
+     title="CodeT5 Code Generation Builder",
+     description="Generate code snippets using CodeT5 and interact with the AI model through a simple web interface.",
+ )
+
+ def generate_code(input_code, upload_file, temperature, max_length):
+     # Preprocess the input: an uploaded file takes precedence over the text box
+     if upload_file is not None:
+         with open(upload_file.name, 'r') as file:
+             input_code = file.read()
+
+     # Tokenize the input code
+     input_ids = tokenizer.encode(input_code, return_tensors='pt')
+
+     # Generate code using CodeT5 (sampling so the temperature slider takes effect)
+     output = model.generate(input_ids, do_sample=True, temperature=temperature, max_length=int(max_length))
+
+     # Convert the output to a string
+     generated_code = tokenizer.decode(output[0], skip_special_tokens=True)
+
+     # Update the conversation history
+     conversation_history = f"Input Code: {input_code}\nGenerated Code: {generated_code}"
+
+     return generated_code, conversation_history
+
+ # Launch the Gradio interface
+ demo.launch()
+
+ # This script defines a Gradio interface that takes four inputs:
+ #
+ #   - A text box for entering code or a prompt
+ #   - A file uploader for uploading a code file
+ #   - A temperature slider to adjust the generation temperature
+ #   - A max length slider to cap the length of the generated code
+ #
+ # The interface returns two outputs:
+ #
+ #   - A code box displaying the generated code
+ #   - A text box displaying the conversation history (the input code and the generated code)
+ #
+ # When the user submits, generate_code is called: it reads the uploaded file (if any),
+ # tokenizes the input code, generates code with CodeT5, decodes the output to a string,
+ # and updates the conversation history accordingly.
 
+ # Note: the transformers library must be installed; the CodeT5 model and tokenizer are
+ # downloaded from the Hugging Face Hub the first time the script runs.
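As a concrete version of the install step that the closing note refers to, here is a minimal sketch; it assumes a PyTorch backend and a recent Gradio release, neither of which is specified in the commit:

    pip install gradio transformers torch

With those packages available, python app.py launches the interface locally.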
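To illustrate the tokenize, generate, and decode flow that the comments in app.py describe, a minimal standalone sketch against the Salesforce/codet5-base checkpoint follows; the prompt string and the use of a masked span token are illustrative assumptions, not part of the commit:

    from transformers import RobertaTokenizer, T5ForConditionalGeneration

    tokenizer = RobertaTokenizer.from_pretrained("Salesforce/codet5-base")
    model = T5ForConditionalGeneration.from_pretrained("Salesforce/codet5-base")

    # codet5-base is a span-masking seq2seq model; <extra_id_0> marks the span to fill in
    text = "def greet(user): print(f'hello <extra_id_0>!')"
    input_ids = tokenizer(text, return_tensors="pt").input_ids

    # greedy decoding here for a deterministic check; app.py samples with a temperature instead
    generated_ids = model.generate(input_ids, max_length=10)
    print(tokenizer.decode(generated_ids[0], skip_special_tokens=True))

The decoded string is the model's proposed completion for the masked span, produced by the same decode step that generate_code uses before building its conversation history.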