# app.py
import gradio as gr
from evo_transformer import EvoTransformer
import matplotlib.pyplot as plt

def run_evolution(generations):
    evo = EvoTransformer()
    evo.evolve(generations)
    history = evo.get_history()
    results = evo.evaluate()

    # Format history for output
    trait_logs = ""
    for i, config in enumerate(history):
        trait_logs += f"Generation {i}: {config}\n"

    # Plot evolution of architecture traits (heads, layers, FFN dim) over generations
    heads = [conf["attention_heads"] for conf in history]
    layers = [conf["layers"] for conf in history]
    ffn = [conf["ffn_dim"] for conf in history]

    fig, ax = plt.subplots()
    ax.plot(range(len(heads)), heads, label="Attention Heads")
    ax.plot(range(len(layers)), layers, label="Layers")
    ax.plot(range(len(ffn)), ffn, label="FFN Dim")
    ax.set_title("Evolution of Traits")
    ax.set_xlabel("Generation")
    ax.set_ylabel("Value")
    ax.legend()

    return trait_logs, results["accuracy"], round(results["params"], 2), fig

# Gradio UI
demo = gr.Interface(
    fn=run_evolution,
    inputs=gr.Slider(minimum=1, maximum=10, step=1, label="Generations"),
    outputs=[
        gr.Textbox(label="Evolution History"),
        gr.Number(label="Simulated Accuracy"),
        gr.Number(label="Estimated Parameters (M)"),
        gr.Plot(label="Trait Evolution Plot"),
    ],
    title="🧬 EvoTransformer Demo",
    description="An evolving Transformer that mutates architecture traits during training. Watch the architecture change in real time!"
)

if __name__ == "__main__":
    demo.launch()
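
app.py imports EvoTransformer from a local evo_transformer module that is not shown here. A minimal sketch of the interface the app assumes (an evolve() step, get_history(), evaluate(), and the trait keys layers, attention_heads, and ffn_dim) might look like the following; the mutation logic and the metrics are illustrative placeholders, not the actual implementation:

# evo_transformer.py (illustrative sketch of the interface app.py expects)
import random

class EvoTransformer:
    """Stand-in that mutates architecture traits across generations."""

    def __init__(self):
        # Starting architecture configuration; trait names match what app.py reads.
        self.config = {"layers": 4, "attention_heads": 4, "ffn_dim": 1024}
        self.history = [dict(self.config)]

    def evolve(self, generations):
        # Randomly mutate one trait per generation and record each configuration.
        for _ in range(int(generations)):
            trait = random.choice(list(self.config))
            step = {"layers": 1, "attention_heads": 1, "ffn_dim": 256}[trait]
            self.config[trait] = max(1, self.config[trait] + random.choice([-step, step]))
            self.history.append(dict(self.config))

    def get_history(self):
        # List of per-generation configs, consumed by the trait plot and log.
        return self.history

    def evaluate(self):
        # Placeholder metrics: a simulated accuracy and a rough parameter count in millions.
        params = self.config["layers"] * self.config["attention_heads"] * self.config["ffn_dim"] / 1e4
        return {"accuracy": round(random.uniform(0.6, 0.95), 3), "params": params}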