File size: 763 Bytes
18a1145
60370a2
18a1145
 
 
60370a2
 
18a1145
60370a2
18a1145
 
 
60370a2
18e62cf
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline

# Load the SantaCoder checkpoint (downloads from the HF hub on first run).
# NOTE(review): no device placement is specified, so the model runs on CPU
# unless transformers' defaults pick otherwise — confirm if GPU is intended.
tokenizer = AutoTokenizer.from_pretrained("bigcode/santacoder")
model = AutoModelForCausalLM.from_pretrained("bigcode/santacoder")
# Module-level text-generation pipeline; used by fix_code() below.
fix_pipeline = pipeline("text-generation", model=model, tokenizer=tokenizer)

def fix_code(code: str) -> str:
    """Ask the SantaCoder pipeline to repair *code* and return the fixed version.

    Args:
        code: A (possibly buggy) Python snippet to repair.

    Returns:
        The model's proposed fixed code, stripped of the prompt scaffolding,
        or an ``"Error during fixing: ..."`` string if generation failed.
    """
    marker = "# Fixed Code:"
    prompt = f"""# The following Python code has bugs. Fix the code and output only the corrected version.\n\n# Buggy Code:\n{code}\n\n{marker}\n"""
    try:
        # max_new_tokens (not max_length) so the generation budget does not
        # shrink as the input snippet grows: max_length counts prompt tokens.
        # Greedy decoding (do_sample=False): bug-fixing is a deterministic
        # task; unseeded sampling made results irreproducible.
        result = fix_pipeline(
            prompt, max_new_tokens=256, do_sample=False
        )[0]["generated_text"]
        # Keep only the text after the last marker occurrence; if the model
        # never emitted one (it's in the prompt, so it always appears at
        # least once), fall back to the raw output.
        _, sep, fixed = result.rpartition(marker)
        return (fixed if sep else result).strip()
    except Exception as e:
        # Broad catch is deliberate: callers expect a string, never a raise.
        return f"Error during fixing: {e}"