File size: 3,175 Bytes
3b6fde5
 
 
 
 
 
 
ca20e12
 
3b6fde5
49ddfe8
484861b
5d910a8
 
 
 
 
484861b
5d910a8
 
 
 
 
 
 
 
 
 
 
 
484861b
3b6fde5
 
 
 
8102149
 
 
 
 
3b6fde5
8102149
 
 
3b6fde5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
e699979
 
3b6fde5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
import os
import json
import subprocess
from tempfile import NamedTemporaryFile
import gradio as gr
from huggingface_hub import InferenceClient
import wandb
import shutil
import sys

# 1. Initialize W&B (free tier) for basic logging
# Non-interactive login: only attempt it when an API key is present, so the
# app still starts in environments without W&B credentials.
key = os.getenv("WANDB_API_KEY")
if key:
    wandb.login(key=key, relogin=True)

# Always run anonymously (no entity permission needed)
wandb.init(
    project="misra-smart-fixer",
    mode="online",
    anonymous="must"
)

# 2. Hugging Face Inference Client (CPU-only, free quota)
# NOTE(review): HF_API_TOKEN may be unset; the client then falls back to the
# anonymous free quota — confirm that is intended for deployment.
HF_TOKEN = os.getenv("HF_API_TOKEN")
client = InferenceClient(model="declare-lab/flan-alpaca-gpt4", token=HF_TOKEN)

def ensure_tool(name: str):
    """Abort the process if the external executable *name* cannot be found.

    Writes an error to stderr and exits with status 1 when the tool is
    missing; returns None when it is available on PATH.
    """
    if shutil.which(name) is not None:
        return
    sys.stderr.write(f"Error: `{name}` not found. Please install it and retry.\n")
    sys.exit(1)

def run_cppcheck(source_code: str):
    """Run cppcheck over *source_code* and return ``(temp_path, issues)``.

    The code is written to a temporary ``.cpp`` file which is left on disk
    (``delete=False``) because the path is returned to the caller, which
    re-reads it in ``build_prompt``.  ``issues`` is a list of dicts with at
    least ``message`` and ``line`` keys, matching what ``build_prompt``
    consumes; it is empty when nothing parseable was reported.
    """
    # Check for the code checker tool
    ensure_tool("cppcheck")

    # Write uploaded code to temp file; the context manager guarantees the
    # handle is flushed and closed even if the write fails.
    with NamedTemporaryFile(mode="w", suffix=".cpp", delete=False) as tf:
        tf.write(source_code)
        path = tf.name

    # Bug fix: the original passed `--profile=misra-cpp-2012` and
    # `--template=json`, neither of which is a real cppcheck option, so
    # cppcheck rejected the command line and `json.loads(res.stderr)`
    # could never succeed — no issue was ever reported.  `--template`
    # takes a format string; emit one JSON object per diagnostic instead.
    # NOTE(review): real MISRA checking needs `--addon=misra` (C rules) or
    # a premium cppcheck build — confirm which is intended here.
    template = '{"file":"{file}","line":{line},"severity":"{severity}","message":"{message}"}'
    cmd = [
        "cppcheck", "--enable=all",
        "--std=c++17", "--language=c++",
        f"--template={template}",
        path,
    ]
    res = subprocess.run(cmd, capture_output=True, text=True)

    # One diagnostic per stderr line; skip anything non-JSON (progress or
    # usage text) rather than discarding the whole run.
    issues = []
    for raw in res.stderr.splitlines():
        raw = raw.strip()
        if not raw.startswith("{"):
            continue
        try:
            issues.append(json.loads(raw))
        except json.JSONDecodeError:
            continue
    return path, issues

def build_prompt(filename: str, issues: list):
    """Build the LLM prompt from a source file and its cppcheck issues.

    Returns ``None`` when *issues* is empty (nothing to fix); otherwise a
    stripped prompt string containing the source code and a bulleted
    summary of every issue.
    """
    # Nothing to fix -> skip the file read entirely.
    if not issues:
        return None
    with open(filename) as f:
        src = f.read()
    summary = "\n".join(
        f"- {item['message']} at line {item['line']}" for item in issues
    )
    # Bug fix: the original read `src` but never interpolated it, so the
    # model saw "Source code:" followed by nothing.
    prompt = f"""
You are a C++ expert. The code below may violate MISRA C++:2012 rules.
Source code:
{src}

Issues:
{summary}

For each issue, suggest a concise patch (diff format) to fix it.
Only output the unified diff, no extra commentary.
"""
    return prompt.strip()

def predict_patch(prompt: str):
    """Generate a unified-diff patch for *prompt* via the HF inference
    client, logging both prompt and patch to Weights & Biases."""
    result = client.text_generation(prompt, max_new_tokens=256)
    suggestion = result.generated_text
    # Record the round-trip so runs can be audited in the W&B dashboard.
    wandb.log({"prompt": prompt, "patch": suggestion})
    return suggestion

def process_file(file_obj):
    """Gradio handler: check an uploaded C/C++ file and generate a patch.

    Returns a ``(status_message, patch_or_None)`` pair for the two output
    widgets.
    """
    # Bug fix: gr.File hands the callback a filepath string by default
    # (or an object with a .name attribute in older gradio versions).
    # The original passed that value straight to run_cppcheck as if it
    # were source text, so cppcheck analysed the path string, not the
    # uploaded code.  Read the file when the value is an existing path.
    path = getattr(file_obj, "name", file_obj)
    if isinstance(path, str) and os.path.exists(path):
        with open(path, encoding="utf-8", errors="replace") as f:
            src = f.read()
    else:
        # Fall back to treating the value as raw source text.
        src = file_obj
    fname, issues = run_cppcheck(src)
    prompt = build_prompt(fname, issues)
    if prompt is None:
        return "No MISRA violations found.", None
    patch = predict_patch(prompt)
    return "Patch generated below:", patch

# Gradio UI
# gr.File restricted to C/C++ sources; the handler receives the upload as a
# filepath string (gradio 4.x default) or a file-like object on older versions.
iface = gr.Interface(
    fn=process_file,
    inputs=gr.File(file_types=[".c", ".cpp", ".h", ".hpp"]),
    outputs=[gr.Text(), gr.Text()],  # status message, generated patch
    title="MISRA Smart Fixer",
    description="Upload C/C++ code to auto-fix MISRA violations.",
    allow_flagging="never"  # NOTE(review): deprecated in gradio 4.x (flagging_mode) — confirm installed version
)

if __name__ == "__main__":
    # Bind to all interfaces on 7860, the standard Hugging Face Spaces port.
    iface.launch(server_name="0.0.0.0", server_port=7860)