import json
import os
import shutil
import subprocess
import sys
from tempfile import NamedTemporaryFile

import gradio as gr
import wandb
from huggingface_hub import InferenceClient
# 1. Initialize W&B (free tier) for basic logging; log in non-interactively
key = os.getenv("WANDB_API_KEY")
if key:
    wandb.login(key=key, relogin=True)
# Always run anonymously (no entity permission needed)
wandb.init(
    project="misra-smart-fixer",
    mode="online",
    anonymous="must",
)
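# Note: with anonymous="must", W&B creates a throwaway anonymous run and
# prints its URL to the console; the prompt/patch pairs logged further
# down (in predict_patch) show up there.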
# 2. Hugging Face Inference Client (CPU-only, free quota)
HF_TOKEN = os.getenv("HF_API_TOKEN")
client = InferenceClient(model="declare-lab/flan-alpaca-gpt4", token=HF_TOKEN)
def ensure_tool(name: str):
    # Exit early with a clear message if a required CLI tool is missing
    if shutil.which(name) is None:
        print(f"Error: `{name}` not found. Please install it and retry.", file=sys.stderr)
        sys.exit(1)
def run_cppcheck(source_code: str):
    # Check for the code checker tool
    ensure_tool("cppcheck")
    # Write uploaded code to a temp file
    tf = NamedTemporaryFile(suffix=".cpp", delete=False)
    tf.write(source_code.encode())
    tf.flush()
    tf.close()
    # Run Cppcheck with its MISRA addon (MISRA C:2012 checks) and a custom
    # template so each finding on stderr is machine-parseable. Cppcheck has
    # no native JSON output, so parse "line|message" pairs instead.
    cmd = [
        "cppcheck", "--enable=all",
        "--std=c++17", "--language=c++",
        "--addon=misra",
        "--template={line}|{message}", tf.name
    ]
    res = subprocess.run(cmd, capture_output=True, text=True)
    issues = []
    for raw in res.stderr.splitlines():
        line_no, sep, message = raw.partition("|")
        if sep and line_no.strip().isdigit():
            issues.append({"line": int(line_no.strip()), "message": message.strip()})
    return tf.name, issues
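# Example (illustrative, not verbatim Cppcheck output): with the template
# above, a finding on line 12 arrives on stderr roughly as
#   12|misra violation ...
# and is parsed into {"line": 12, "message": "misra violation ..."}.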
def build_prompt(filename: str, issues: list):
    if not issues:
        return None
    with open(filename) as f:
        src = f.read()
    summary = "\n".join(
        f"- {item['message']} at line {item['line']}"
        for item in issues
    )
    prompt = f"""
You are a C/C++ expert. The code below may violate MISRA rules.
Source code:
{src}
Issues:
{summary}
For each issue, suggest a concise patch (diff format) to fix it.
Only output the unified diff, no extra commentary.
"""
    return prompt.strip()
def predict_patch(prompt: str):
    # InferenceClient.text_generation returns the generated text directly
    # (a str) unless details=True is passed
    patch = client.text_generation(prompt, max_new_tokens=256)
    wandb.log({"prompt": prompt, "patch": patch})
    return patch
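# For reference (illustrative only, not actual model output), a unified
# diff patch has this general shape:
#   --- a/input.cpp
#   +++ b/input.cpp
#   @@ -5 +5 @@
#   -int x;
#   +int32_t x = 0;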
def process_file(file_obj):
    # gr.File hands the upload over as a filepath by default, so read it
    with open(file_obj, encoding="utf-8", errors="replace") as f:
        src = f.read()
    fname, issues = run_cppcheck(src)
    prompt = build_prompt(fname, issues)
    if prompt is None:
        return "No MISRA violations found.", None
    patch = predict_patch(prompt)
    return "Patch generated below:", patch
# Gradio UI
iface = gr.Interface(
    fn=process_file,
    inputs=gr.File(file_types=[".c", ".cpp", ".h", ".hpp"]),
    outputs=[gr.Text(label="Status"), gr.Text(label="Suggested patch")],
    title="MISRA Smart Fixer",
    description="Upload C/C++ code to auto-fix MISRA violations.",
    allow_flagging="never",
)
if __name__ == "__main__":
    iface.launch(server_name="0.0.0.0", server_port=7860)
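# Usage (assuming this file is saved as app.py): run `python app.py`, then
# open http://localhost:7860 (or the Space URL) and upload a .c/.cpp/.h/.hpp
# file to get a suggested MISRA fix as a unified diff.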