# tweets_clone / app.py
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM
# Load pre-trained model (or fine-tuned model)
model_name = "/kaggle/working/gpt-finetuned-qa" # Replace with the fine-tuned model name
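# Note: the Kaggle path above only exists inside the training environment. For a
# deployed Space, the fine-tuned weights would typically be pushed to the Hub and
# loaded by repo id instead, e.g. (hypothetical repo id, for illustration only):
# model_name = "your-username/gpt-finetuned-qa"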
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
if tokenizer.pad_token is None:  # GPT-style tokenizers often lack a pad token, which padding/generate below require
    tokenizer.pad_token = tokenizer.eos_token
# Function to generate tweets
def generate_tweet(input_text):
    prompt = (
        "You are a tech-savvy, forward-thinking individual with a deep understanding of technology, innovation, and cultural trends. "
        "Craft a tweet that reflects insightful commentary, wit, or actionable advice based on the following idea: \"{}\". "
        "Ensure the response is concise, engaging, and suitable for a diverse audience on social media. "
        "Incorporate elements of thought leadership, futuristic perspectives, and practical wisdom where appropriate."
    ).format(input_text)
    inputs = tokenizer(prompt, return_tensors="pt", max_length=512, truncation=True, padding=True)
    outputs = model.generate(
        inputs['input_ids'],
        attention_mask=inputs['attention_mask'],
        max_length=280,  # token budget including the prompt, not the 280-character tweet limit
        num_return_sequences=1,
        top_p=0.95,
        top_k=50,
        do_sample=True,
        pad_token_id=tokenizer.pad_token_id
    )
    generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
    # Extract the tweet text (exclude the prompt if it was echoed back)
    return generated_text.replace(prompt, "").strip()
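# The settings above cap tokens, not characters, so outputs can still exceed Twitter's
# 280-character limit. A minimal post-processing sketch (hypothetical helper, not part
# of the original app) could hard-truncate before display:
def trim_to_tweet(text, limit=280):
    """Truncate text to the tweet character limit, ending on an ellipsis if cut."""
    return text if len(text) <= limit else text[: limit - 1].rstrip() + "…"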
# Gradio interface
def main():
    with gr.Blocks() as interface:
        gr.Markdown("""
        # Tweet Generator
        Enter a topic or idea, and the AI will craft a tweet inspired by innovative, philosophical, and tech-savvy thought leadership.
        """)
        with gr.Row():
            input_text = gr.Textbox(label="Enter your idea or topic:")
            output_tweet = gr.Textbox(label="Generated Tweet:", interactive=False)
        generate_button = gr.Button("Generate Tweet")
        generate_button.click(generate_tweet, inputs=[input_text], outputs=[output_tweet])
    return interface
# Run Gradio app
if __name__ == "__main__":
    app = main()
    app.launch(share=True)
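# Note: on Hugging Face Spaces, `share=True` is ignored (the Space already serves a
# public URL); it only matters when running the script locally with `python app.py`.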