# Translator-API / app.py
from huggingface_hub import InferenceClient
import gradio as gr

# Note: replace "your_hugging_face_api_key" with your actual Hugging Face API key.
client = InferenceClient(
    model="mistralai/Mixtral-8x7B-Instruct-v0.1",
    token="your_hugging_face_api_key",
)

def translate_text(input_text, target_language):
    """Ask the Mixtral instruct model to translate the input text."""
    prompt = f"Translate the following text into {target_language}: {input_text}"
    try:
        # text_generation sends the prompt to the hosted Inference API and
        # returns the generated completion as a plain string.
        translated_text = client.text_generation(prompt, max_new_tokens=512)
    except Exception as e:
        translated_text = f"Error: {str(e)}"
    return translated_text
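
# Optional quick sanity check (a sketch, not part of the original app): set the
# TRANSLATOR_SMOKE_TEST environment variable (a hypothetical name chosen here) to run
# one translation from the command line before the Gradio UI starts.
import os

if os.getenv("TRANSLATOR_SMOKE_TEST"):
    print(translate_text("Bonjour tout le monde", "English"))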
iface = gr.Interface(
    fn=translate_text,
    inputs=[gr.Textbox(label="Text to Translate"), gr.Textbox(label="Target Language")],
    outputs=gr.Textbox(label="Translated Text"),
    title="Simple Translator with Mixtral",
    description="Translate text to your specified language using the Mixtral model from Hugging Face.",
)

iface.launch()
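
# Usage note (an assumption about deployment, not stated in the original file): on a
# Hugging Face Space the plain launch() above is sufficient; when running the script
# locally, iface.launch(share=True) would additionally create a temporary public
# Gradio link for sharing the demo.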