# Oroz — HuggingFace Space entry point (app.py)
import gradio as gr
import pandas as pd
from transformers import pipeline, AutoModelForCausalLM, AutoTokenizer
# Initialize the Hugging Face text-generation pipeline.
# Loading happens once at import time; the resulting `generator` (and
# `tokenizer`) are module-level globals used by generate_solutions below.
model_name = "gpt2" # Using GPT-2 to ensure compatibility
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
def generate_solutions(query):
    """Generate candidate maintenance solutions for a problem description.

    Args:
        query: Free-text description of the machine problem.

    Returns:
        An HTML table (string) with one "Solution" row per generated
        candidate, suitable for a gr.HTML output component.
    """
    # do_sample=True is required: num_return_sequences > 1 raises under
    # GPT-2's default greedy decoding. pad_token_id is set explicitly
    # because GPT-2 defines no pad token.
    response = generator(
        query,
        max_length=100,
        num_return_sequences=3,
        do_sample=True,
        pad_token_id=tokenizer.eos_token_id,
    )
    # Extract the generated texts into one row per candidate solution.
    solutions = [{"Solution": r["generated_text"]} for r in response]
    df = pd.DataFrame(solutions)
    # escape=True: model output is untrusted text rendered into the page;
    # escaping prevents HTML/script injection (original used escape=False).
    return df.to_html(escape=True, index=False)
# Wire the solution generator into a simple Gradio web UI:
# free-text problem description in, HTML table of suggestions out.
problem_box = gr.Textbox(lines=2, placeholder="Describe the problem with the machine...")
iface = gr.Interface(
    fn=generate_solutions,
    inputs=problem_box,
    outputs=gr.HTML(),
    title="Oroz: Your Industry Maintenance Assistant",
    description="Describe the problem with your machine, and get an organized table of suggested solutions.",
)
# share=True exposes a public tunnel URL in addition to the local server.
iface.launch(share=True)