# transart / app.py — Hugging Face Space by pravin007s (revision f34d5e4)
# -*- coding: utf-8 -*-
"""gen ai project f.ipynb
Automatically generated by Colab.
Original file is located at
https://colab.research.google.com/drive/1iF7hdOjWNeFUtGvUYdaFsBErJGnY1h5J
"""
import os
from transformers import MarianMTModel, MarianTokenizer, GPTNeoForCausalLM, AutoTokenizer
import gradio as gr
from PIL import Image, UnidentifiedImageError
import requests
import io
# Load translation model (Helsinki-NLP Marian, many source languages -> English).
# Both downloads/loads happen once at import time, before the UI starts.
model_name = "Helsinki-NLP/opus-mt-mul-en"
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)

# Load GPT-Neo 1.3B for creative text generation.
# NOTE(review): this is a multi-GB download and needs several GB of RAM;
# confirm the hosting hardware can hold both models at once.
gpt_neo_model = GPTNeoForCausalLM.from_pretrained("EleutherAI/gpt-neo-1.3B")
gpt_neo_tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neo-1.3B")
# Define language map (including new languages).
# Maps UI language names to the codes used in the >>code<< prefix passed to
# the Marian tokenizer. NOTE(review): the codes mix ISO 639-1 ("ta", "ar",
# "pt") and 639-3 ("rus") — presumably matching the language tokens in the
# opus-mt-mul-en vocabulary; confirm against the tokenizer's vocab.
language_map = {
    "Tamil": "ta",
    "Russian": "rus",
    "Arabic": "ar",
    "Portuguese": "pt"
}
def translate_text(input_text, selected_languages):
    """Translate `input_text` to English with the MarianMT multilingual model.

    Parameters:
        input_text: Source-language text to translate.
        selected_languages: Languages picked in the UI; only the first
            entry is used (the widget allows several).

    Returns:
        The English translation, or an explanatory message when no
        supported language was selected.
    """
    if not selected_languages:
        return "Please select at least one language."
    selected_language = selected_languages[0]  # only the first selection is honoured
    # .get avoids a KeyError crash if the UI ever offers a language that
    # is missing from language_map.
    lang_code = language_map.get(selected_language)
    if lang_code is None:
        return f"Unsupported language: {selected_language}"
    # NOTE(review): the >>code<< prefix selects the *target* language in
    # multilingual-target Marian models; opus-mt-mul-en always targets
    # English, so this prefix is presumably redundant — confirm.
    text_with_lang = f">>{lang_code}<< " + input_text
    inputs = tokenizer(text_with_lang, return_tensors="pt", padding=True)
    translated_tokens = model.generate(**inputs)
    return tokenizer.decode(translated_tokens[0], skip_special_tokens=True)
def generate_image(prompt):
    """Generate an image for `prompt` via the Hugging Face inference API.

    Parameters:
        prompt: Text prompt sent to the FLUX.1-dev hosted model.

    Returns:
        A PIL.Image on success, or None when the request fails, times out,
        or the response body is not a decodable image.
    """
    API_URL = "https://api-inference.huggingface.co/models/black-forest-labs/FLUX.1-dev"
    hf_token = os.getenv("HF_TOKEN")  # may be None; the API will then reject the call
    headers = {"Authorization": f"Bearer {hf_token}"}
    try:
        # A timeout keeps the Gradio handler from hanging forever on a
        # stalled request; network errors are treated as "no image".
        response = requests.post(
            API_URL, headers=headers, json={"inputs": prompt}, timeout=120
        )
    except requests.RequestException:
        return None
    if response.status_code != 200:
        return None
    try:
        return Image.open(io.BytesIO(response.content))
    except UnidentifiedImageError:
        return None
def generate_creative_text(translated_text):
    """Generate a short creative continuation of `translated_text` with GPT-Neo.

    Parameters:
        translated_text: English sentence to riff on.

    Returns:
        The decoded GPT-Neo output (prompt included, up to 100 tokens total).
    """
    prompt = f"Create a creative text based on the following sentence: {translated_text}"
    # GPT-2-style tokenizers ship without a pad token; requesting padding
    # without one raises an error, so reuse EOS as the pad token.
    if gpt_neo_tokenizer.pad_token is None:
        gpt_neo_tokenizer.pad_token = gpt_neo_tokenizer.eos_token
    inputs = gpt_neo_tokenizer(
        prompt, return_tensors="pt", padding=True, truncation=True, max_length=100
    )
    # Pass attention_mask and pad_token_id explicitly so generation is
    # well-defined with padded input instead of warning/misbehaving.
    output = gpt_neo_model.generate(
        inputs["input_ids"],
        attention_mask=inputs["attention_mask"],
        max_length=100,
        do_sample=True,
        temperature=0.7,
        pad_token_id=gpt_neo_tokenizer.eos_token_id,
    )
    return gpt_neo_tokenizer.decode(output[0], skip_special_tokens=True)
def process_input(text_input, selected_languages):
    """Run the full pipeline for one UI submission.

    Translates the input text, then derives both a creative text and a
    generated image from that translation.

    Returns:
        A (translation, creative_text, image) tuple matching the three
        Gradio output components, in order.
    """
    translation = translate_text(text_input, selected_languages)
    return (
        translation,
        generate_creative_text(translation),
        generate_image(translation),
    )
# Gradio interface: a text box plus a language selector feed process_input,
# which returns the translation, a creative text, and a generated image.
# NOTE(review): CheckboxGroup allows multiple selections, but only the first
# selected language is used downstream — a Radio/Dropdown may fit better.
interface = gr.Interface(
    fn=process_input,
    inputs=[gr.Textbox(label="Input Text"), gr.CheckboxGroup(choices=["Tamil", "Russian", "Arabic", "Portuguese"], label="Select Language")],
    outputs=[gr.Textbox(label="Translated Text"), gr.Textbox(label="Creative Text"), gr.Image(label="Generated Image")],
    title="Multilingual Translation, Creative Text, and Image Generation",
    description="Translate Tamil, Russian, Arabic, or Portuguese text to English, generate creative text, and generate an image."
)
# Blocks here serving the app; must be the last statement in the script.
interface.launch()