from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline

# SantaCoder ships custom modeling code, so trust_remote_code=True is required.
tokenizer = AutoTokenizer.from_pretrained("bigcode/santacoder")
model = AutoModelForCausalLM.from_pretrained("bigcode/santacoder", trust_remote_code=True)
fix_pipeline = pipeline("text-generation", model=model, tokenizer=tokenizer)

def fix_code(code: str) -> str:
    prompt = (
        "# The following Python code has bugs. Fix the code and output only the corrected version.\n\n"
        f"# Buggy Code:\n{code}\n\n"
        "# Fixed Code:\n"
    )
    try:
        # max_new_tokens bounds only the generated continuation, so a long buggy
        # snippet in the prompt does not eat into the output budget.
        result = fix_pipeline(prompt, max_new_tokens=256, do_sample=True)[0]["generated_text"]
        # Post-process: keep only the text after the "# Fixed Code:" marker.
        return result.split("# Fixed Code:")[-1].strip()
    except Exception as e:
        return f"Error during fixing: {e}"