import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch
from dotenv import load_dotenv
import os

# Load environment variables
load_dotenv()

# Load the model and tokenizer
# tokenizer = AutoTokenizer.from_pretrained("google/gemma-2b")
# model = AutoModelForCausalLM.from_pretrained("google/gemma-2b")
tokenizer = AutoTokenizer.from_pretrained("tiiuae/falcon-rw-1b", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained("tiiuae/falcon-rw-1b", trust_remote_code=True)
# Function to generate blog content
def generate_blog(topic, keywords):
    prompt_template = f"""
You are a technical content writer. Write a detailed and informative blog on the following topic.

Topic: {topic}
Keywords: {keywords}

Make sure the blog covers the following sections:
1. Introduction
2. Detailed Explanation
3. Examples
4. Conclusion

Blog:
"""
    # Tokenize the prompt and generate a continuation
    inputs = tokenizer(prompt_template, return_tensors="pt", max_length=512, truncation=True)
    outputs = model.generate(
        inputs["input_ids"],
        attention_mask=inputs["attention_mask"],
        max_length=800,
        num_return_sequences=1,
        pad_token_id=tokenizer.eos_token_id,
    )
    blog_content = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return blog_content
# Gradio interface
iface = gr.Interface(
    fn=generate_blog,
    inputs=[
        gr.Textbox(lines=2, placeholder="Enter the blog topic", label="Blog Topic"),
        gr.Textbox(lines=2, placeholder="Enter keywords (comma-separated)", label="Keywords"),
    ],
    outputs=gr.Textbox(label="Generated Blog Content"),
    title="Technical Blog Generator",
    description="Generate a detailed technical blog by providing a topic and relevant keywords.",
)

if __name__ == "__main__":
    iface.launch(share=True)  # Set share=True to generate a public link