from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline

# Load the same CodeT5 model
tokenizer = AutoTokenizer.from_pretrained("Salesforce/codet5-base")
model = AutoModelForSeq2SeqLM.from_pretrained("Salesforce/codet5-base")

modifier_pipeline = pipeline("text2text-generation", model=model, tokenizer=tokenizer)


def modify_code(code: str, instruction: str) -> str:
    """
    Modifies code based on the user's instruction prompt.
    """
    prompt = f"Instruction: {instruction}\nCode:\n{code}\nModified Code:"
    try:
        # The pipeline returns a list of dicts; take the first generation.
        result = modifier_pipeline(prompt, max_length=256)[0]["generated_text"]
        return result.strip()
    except Exception as e:
        return f"Error during modification: {e}"
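

# A minimal usage sketch (the sample code and instruction below are assumed
# example inputs, not from the original; the actual generated text depends on
# the loaded checkpoint and may differ in quality and format):
if __name__ == "__main__":
    sample_code = "def add(a, b):\n    return a + b"
    instruction = "Rename the function to sum_two and add type hints."
    print(modify_code(sample_code, instruction))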