import os
import gc
import torch
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
import spaces
# Ensure the directory Gradio uses for cached examples exists
os.makedirs('.gradio/cached_examples/17', exist_ok=True)
def get_model_name(language):
    """Map language choice to the corresponding model."""
    model_mapping = {
        "English": "microsoft/Phi-3-mini-4k-instruct",
        "Arabic": "ALLaM-AI/ALLaM-7B-Instruct-preview",
    }
    return model_mapping.get(language, "ALLaM-AI/ALLaM-7B-Instruct-preview")  # Default to the Arabic model
def load_model(model_name):
    """Load the model and tokenizer and wrap them in a text-generation pipeline."""
    device = "cuda" if torch.cuda.is_available() else "cpu"
    model = AutoModelForCausalLM.from_pretrained(
        model_name,
        device_map=device,
        torch_dtype="auto",
        trust_remote_code=True,
    )
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    generator = pipeline(
        "text-generation",
        model=model,
        tokenizer=tokenizer,
        return_full_text=False,
        max_new_tokens=500,
        do_sample=True,  # Enable sampling for more creative outputs
        top_k=50,        # Control diversity
        top_p=0.95,      # Control diversity
    )
    return generator
@spaces.GPU
def generate_kids_story(character, setting, language):
    model_name = get_model_name(language)
    generator = load_model(model_name)

    # Build the prompt for the model in the selected language
    if language == "English":
        prompt = (f"Write a short story for kids about a character named {character} "
                  f"who goes on an adventure in {setting}. "
                  "Make it fun, engaging, and suitable for children.")
    else:
        prompt = (f"اكتب قصة قصيرة للأطفال عن شخصية اسمها {character} التي تذهب في مغامرة في {setting}. "
                  "اجعلها ممتعة وجذابة ومناسبة للأطفال.")

    # The text-generation pipeline accepts chat-style messages; with
    # return_full_text=False it returns only the generated reply
    messages = [{"role": "user", "content": prompt}]
    output = generator(messages)
    story = output[0]["generated_text"]

    # Delete the pipeline (and the model/tokenizer it holds), run garbage
    # collection, and empty the CUDA cache so switching languages does not
    # keep two models in memory
    del generator
    gc.collect()
    torch.cuda.empty_cache()

    return story
# Create Gradio interface
demo = gr.Interface(
    fn=generate_kids_story,
    inputs=[
        gr.Textbox(placeholder="Enter a character name (e.g., Benny the Bunny)...", label="Character Name"),
        gr.Textbox(placeholder="Enter a setting (e.g., a magical forest)...", label="Setting"),
        gr.Dropdown(
            choices=["English", "Arabic"],
            label="Choose Language",
            value="English",  # Default to English
        ),
    ],
    outputs=gr.Textbox(label="Kids' Story"),
    title="📖 AI Kids' Story Generator - English & Arabic 📖",
    description="Enter a character name and a setting, and AI will generate a fun short story for kids in English or Arabic.",
    examples=[
        ["Benny the Bunny", "a magical forest", "English"],
        ["علي البطل", "غابة سحرية", "Arabic"],
        ["Lila the Ladybug", "a garden full of flowers", "English"],
        ["ليلى الجنية", "حديقة مليئة بالأزهار", "Arabic"],
    ],
    theme="default",
)
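
# A minimal sketch for calling the story function directly, without the web UI
# (an assumption for local debugging, not part of the Space itself): this
# downloads the selected model and runs generation, so the first run is slow.
# if __name__ == "__main__":
#     print(generate_kids_story("Benny the Bunny", "a magical forest", "English"))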
# Launch the Gradio app
demo.launch()