pravin007s committed (verified)
Commit 20ff71e · 1 parent: 38ab713

Update app.py

Files changed (1)
  1. app.py +12 -22
app.py CHANGED

@@ -10,20 +10,17 @@ model_name = "Helsinki-NLP/opus-mt-mul-en"
 tokenizer = MarianTokenizer.from_pretrained(model_name)
 model = MarianMTModel.from_pretrained(model_name)
 
-# Load GPT-Neo model for creative text generation
+# Load GPT-Neo model for creative content generation
 gpt_neo_model = GPTNeoForCausalLM.from_pretrained("EleutherAI/gpt-neo-1.3B")
 gpt_neo_tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neo-1.3B")
 
-# Define language map (including new languages)
+# Define language map
 language_map = {
     "Tamil": "ta",
-    "Russian": "rus",
-    "Arabic": "ar",
-    "Portuguese": "pt"
+    "Russian": "rus"
 }
 
 def translate_text(input_text, selected_languages):
-    """Translate input text into English based on the selected language."""
     if not selected_languages:
         return "Please select at least one language."
 
@@ -32,16 +29,13 @@ def translate_text(input_text, selected_languages):
     lang_prefix = f">>{lang_code}<< "
     text_with_lang = lang_prefix + input_text
     inputs = tokenizer(text_with_lang, return_tensors="pt", padding=True)
-
-    # Generate translated tokens
     translated_tokens = model.generate(**inputs)
     translation = tokenizer.decode(translated_tokens[0], skip_special_tokens=True)
     return translation
 
 def generate_image(prompt):
-    """Generate an image based on the provided prompt."""
     API_URL = "https://api-inference.huggingface.co/models/black-forest-labs/FLUX.1-dev"
-    hf_token = os.getenv("HF_TOKEN") # Ensure to set this environment variable
+    hf_token = os.getenv("HF_TOKEN")
     headers = {"Authorization": f"Bearer {hf_token}"}
 
     response = requests.post(API_URL, headers=headers, json={"inputs": prompt})
@@ -57,17 +51,13 @@ def generate_image(prompt):
     return None
 
 def generate_creative_text(translated_text):
-    """Generate creative text based on the translated sentence."""
-    prompt = f"Create a creative text based on the following sentence: {translated_text}"
+    prompt = f"Create a creative story based on the following sentence: {translated_text}"
     inputs = gpt_neo_tokenizer(prompt, return_tensors="pt", padding=True, truncation=True, max_length=100)
-
-    # Generate creative text
-    output = gpt_neo_model.generate(inputs["input_ids"], max_length=100, do_sample=True, temperature=0.7)
+    output = gpt_neo_model.generate(inputs["input_ids"], max_length=150, do_sample=True, temperature=0.7)
     creative_text = gpt_neo_tokenizer.decode(output[0], skip_special_tokens=True)
     return creative_text
 
 def process_input(text_input, selected_languages):
-    """Process the input text: translate, generate creative text, and generate an image."""
     translated_output = translate_text(text_input, selected_languages)
     creative_text = generate_creative_text(translated_output)
     image = generate_image(translated_output)
@@ -77,16 +67,16 @@ def process_input(text_input, selected_languages):
 interface = gr.Interface(
     fn=process_input,
     inputs=[
-        gr.Textbox(label="Input Text"),
-        gr.CheckboxGroup(choices=["Tamil", "Russian", "Arabic", "Portuguese"], label="Select Language")
+        gr.Textbox(label="Input Text"),
+        gr.CheckboxGroup(choices=["Tamil", "Russian"], label="Select Language")
     ],
     outputs=[
-        gr.Textbox(label="Translated Text"),
-        gr.Textbox(label="Creative Text"),
+        gr.Textbox(label="Translated Text"),
+        gr.Textbox(label="Creative Text"),
         gr.Image(label="Generated Image")
     ],
-    title="Multilingual Translation, Creative Text, and Image Generation",
-    description="Translate Tamil, Russian, Arabic, or Portuguese text to English, generate creative text, and generate an image."
+    title="Multilingual Translation and Image Generation",
+    description="Translate Tamil or Russian text to English, generate creative content, and create an image."
 )
 
 interface.launch()
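
A few review notes on the updated code, with hedged sketches rather than drop-in replacements. First, the translation path: the sketch below exercises translate_text's ">>code<<" prefix scheme outside the Gradio UI, so the remaining language codes ("ta", "rus") can be checked against what the opus-mt-mul-en tokenizer actually knows. The sample sentence and the supported_language_codes probe are illustrative assumptions, not part of the commit.

# Minimal standalone sketch of the translation step (assumptions noted inline).
from transformers import MarianMTModel, MarianTokenizer

model_name = "Helsinki-NLP/opus-mt-mul-en"
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)

# MarianTokenizer lists the ">>id<<" codes found in its vocabulary; availability of
# this attribute depends on the installed transformers version.
print(tokenizer.supported_language_codes[:10])

# Same prefix scheme as translate_text(); "rus" is the code kept in language_map.
text_with_lang = ">>rus<< Привет, мир!"
inputs = tokenizer(text_with_lang, return_tensors="pt", padding=True)
translated_tokens = model.generate(**inputs)
print(tokenizer.decode(translated_tokens[0], skip_special_tokens=True))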
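
On the generate() change in generate_creative_text: max_length is a total budget that includes the prompt tokens, so with the prompt truncated at 100 tokens the raise from 100 to 150 still caps new text at roughly 50 tokens. Also, padding=True requires the tokenizer to have a pad token, which GPT-Neo's does not define by default. A hedged alternative sketch (not the committed code) that budgets new tokens separately and reuses the EOS token for padding:

# Alternative sketch only: budget generated tokens independently of prompt length.
from transformers import AutoTokenizer, GPTNeoForCausalLM

gpt_neo_tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neo-1.3B")
gpt_neo_model = GPTNeoForCausalLM.from_pretrained("EleutherAI/gpt-neo-1.3B")

prompt = "Create a creative story based on the following sentence: The lighthouse keeper found a letter."
inputs = gpt_neo_tokenizer(prompt, return_tensors="pt", truncation=True, max_length=100)
output = gpt_neo_model.generate(
    inputs["input_ids"],
    attention_mask=inputs["attention_mask"],
    max_new_tokens=100,                           # counts only newly generated tokens
    do_sample=True,
    temperature=0.7,
    pad_token_id=gpt_neo_tokenizer.eos_token_id,  # GPT-Neo ships without a pad token
)
print(gpt_neo_tokenizer.decode(output[0], skip_special_tokens=True))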
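
Finally, the image step: old lines 48-56 fall between hunks, so the committed handling of the API response is not visible in this diff. The sketch below shows one common pattern under that caveat: check the status code, then decode the returned bytes with PIL (gr.Image can display a PIL image directly). The endpoint URL and the HF_TOKEN environment variable come from the diff; the timeout, error handling, and helper shape are assumptions.

# Hedged sketch of the Inference API call; the committed response handling may differ.
import io
import os

import requests
from PIL import Image

API_URL = "https://api-inference.huggingface.co/models/black-forest-labs/FLUX.1-dev"
hf_token = os.getenv("HF_TOKEN")  # set HF_TOKEN in the environment before running

def generate_image(prompt):
    headers = {"Authorization": f"Bearer {hf_token}"}
    response = requests.post(API_URL, headers=headers, json={"inputs": prompt}, timeout=120)
    if response.status_code != 200:
        # Errors (e.g. model still loading, invalid token) come back as JSON, not image bytes.
        print("Image request failed:", response.status_code, response.text[:200])
        return None
    return Image.open(io.BytesIO(response.content))

image = generate_image("A watercolor lighthouse at dusk")
if image is not None:
    image.save("generated.png")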