Spaces:
Sleeping
Sleeping
File size: 664 Bytes
a2e175e 60370a2 adc6ac8 029dadd b5b33c5 60370a2 b5b33c5 60370a2 b5b33c5 60370a2 b5b33c5 |
1 2 3 4 5 6 7 8 9 10 11 12 13 |
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline

# Checkpoint used for both tokenizer and model.
# NOTE(review): CodeLlama is a decoder-only (causal) model, but the
# "text2text-generation" task expects a seq2seq architecture — confirm the
# intended checkpoint/task pairing.
MODEL_NAME = "codellama/CodeLlama-34b-Instruct-hf"

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
# BUG FIX: the original called EncoderDecoderConfig.from_pretrained(...),
# which returns a configuration object, not a model — pipeline() cannot run
# inference on a config. Load an actual model instead.
model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)

fix_pipeline = pipeline("text2text-generation", model=model, tokenizer=tokenizer)
def fix_code(code: str) -> str:
    """Ask the model to repair buggy Python code.

    Args:
        code: Python source suspected to contain bugs.

    Returns:
        The model's corrected code (whitespace-stripped), or an
        ``"Error during fixing: ..."`` message string if generation fails.
    """
    # Concatenation produces the exact same runtime prompt as the original
    # triple-quoted f-string.
    prompt = (
        "The following Python code has some bugs. Fix the code:\n\n"
        f"{code}\n\nCorrected version:"
    )
    try:
        # The pipeline returns a list of dicts; take the first generation.
        result = fix_pipeline(prompt, max_length=256)[0]["generated_text"]
    except Exception as e:
        # Deliberate best-effort boundary: report the failure as a string
        # rather than propagate, matching the original contract.
        return f"Error during fixing: {e}"
    return result.strip()