# agents/debugger.py
from agents.base_agent import BaseAgent, ACPMessage
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch


class DebuggerAgent(BaseAgent):
    def __init__(self):
        super().__init__(name="BugBot", role="Debugger")
        # Small code-generation model used to phrase debugging/testing questions.
        self.tokenizer = AutoTokenizer.from_pretrained("Salesforce/codegen-350M-multi")
        self.model = AutoModelForCausalLM.from_pretrained("Salesforce/codegen-350M-multi")

    def generate_debug_comment(self, code: str) -> str:
        """Generate a debugging or testing question about the given code."""
        prompt = f"# Review this Python code and ask a smart debugging or testing question:\n{code}\n# Question:"
        inputs = self.tokenizer(prompt, return_tensors="pt", truncation=True)
        outputs = self.model.generate(
            inputs["input_ids"],
            max_length=128,
            do_sample=True,
            temperature=0.7,
            pad_token_id=self.tokenizer.eos_token_id
        )
        reply = self.tokenizer.decode(outputs[0], skip_special_tokens=True)
        # Strip the prompt so only the newly generated question remains.
        return reply[len(prompt):].strip()

    def receive_message(self, message: ACPMessage) -> ACPMessage:
        # Respond to shared code ("inform"/"request") with a follow-up question;
        # otherwise ask the sender for code to review.
        if message.performative in ["inform", "request"]:
            comment = self.generate_debug_comment(message.content)
            return self.create_message(
                receiver=message.sender,
                performative="request",
                content=comment or "Can you improve error handling?"
            )
        else:
            return self.create_message(
                receiver=message.sender,
                performative="request",
                content="Can you show me some Python code?"
            )