import gradio as gr
from transformers import AutoTokenizer, PegasusForConditionalGeneration

# Load Humaneyes Model from Hugging Face
tokenizer = AutoTokenizer.from_pretrained('Eemansleepdeprived/Humaneyes')
model = PegasusForConditionalGeneration.from_pretrained('Eemansleepdeprived/Humaneyes')

# Ensure the model has a pad_token_id (use eos_token_id if missing)
if model.config.pad_token_id is None:
    model.config.pad_token_id = tokenizer.eos_token_id


def humanize_text(ai_text):
    if not ai_text.strip():
        return "❌ Please enter some text to process."

    # Tokenize the input text
    inputs = tokenizer(ai_text, return_tensors="pt")

    # Set generation parameters to avoid excessively long sequences
    outputs = model.generate(
        inputs["input_ids"],
        max_length=256,
        num_beams=5,
        early_stopping=True
    )

    return tokenizer.decode(outputs[0], skip_special_tokens=True)


# Build Gradio UI with a modern layout
with gr.Blocks(theme=gr.themes.Soft(), css=".container {max-width: 700px; margin: auto;}") as demo:
    gr.Markdown("# ✨ AI to Human Text Converter ✨")
    gr.Markdown("Convert AI-generated text into natural, human-like text!")

    with gr.Row():
        ai_input = gr.Textbox(
            label="Enter AI Text",
            placeholder="Type or paste AI-generated text here...",
            lines=7
        )

    btn = gr.Button("🚀 Humanize Text", variant="primary")

    with gr.Row():
        human_output = gr.Textbox(
            label="Humanized Output",
            interactive=False,
            lines=7
        )

    btn.click(humanize_text, inputs=ai_input, outputs=human_output)

if __name__ == "__main__":
    demo.launch()
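
# --- Optional quick check (a minimal sketch, not part of the original app) ---
# demo.launch() above blocks, so to smoke-test the model without the UI you can
# comment out the launch call or run the lines below in a separate REPL session.
# The sample sentence here is illustrative only. For very long inputs, the
# tokenizer call inside humanize_text can also be given truncation=True and a
# max_length so the model never receives more tokens than it can handle.
#
# sample = "The rapid advancement of artificial intelligence has transformed many industries."
# print(humanize_text(sample))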