import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch
from dotenv import load_dotenv
import os

# Load environment variables
load_dotenv()

# Load the model and tokenizer
# tokenizer = AutoTokenizer.from_pretrained("google/gemma-2b")
# model = AutoModelForCausalLM.from_pretrained("google/gemma-2b")
tokenizer = AutoTokenizer.from_pretrained("tiiuae/falcon-rw-1b", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained("tiiuae/falcon-rw-1b", trust_remote_code=True)
# Function to generate a poem from a topic
def generate_blog(topic):
    prompt_template = f"""
You are a content writer. Write a poem of no more than 10 sentences on the following topic.
Topic: {topic}
Poem:
"""
    inputs = tokenizer(prompt_template, return_tensors="pt", max_length=512, truncation=True)
    outputs = model.generate(
        inputs["input_ids"],
        attention_mask=inputs["attention_mask"],
        pad_token_id=tokenizer.eos_token_id,  # Falcon-RW-1B has no dedicated pad token
        max_length=800,
        num_return_sequences=1,
    )
    poem = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return poem
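
# Quick local sanity check while iterating on the prompt (a sketch, not part of
# the Space's normal startup; the example topic is illustrative only):
# print(generate_blog("a rainy afternoon in the city"))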
# Gradio interface
iface = gr.Interface(
    fn=generate_blog,
    inputs=[
        gr.Textbox(lines=2, placeholder="Enter the poem topic", label="Poem Topic"),
    ],
    outputs=gr.Textbox(label="Generated Poem"),
    title="Poem Generator",
    description="Generate a poem based on the provided topic.",
)

if __name__ == "__main__":
    iface.launch(share=True)  # Set share=True to generate a public link
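
# Dependency note (an assumption, not part of the original Space files): the
# imports above imply a requirements.txt along these lines, versions unpinned:
#
#     gradio
#     transformers
#     torch
#     python-dotenv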