TiberiuCristianLeon committed
Commit 2e5046d · verified · 1 Parent(s): b9520bd

Update src/translate/Translate.py

Files changed (1)
  1. src/translate/Translate.py +13 -3
src/translate/Translate.py CHANGED
@@ -53,9 +53,19 @@ def paraphraseTranslateMethod(requestValue: str, model: str):
 
     return " ".join(result_value).strip(), model
 
-def gemma(requestValue: str, model: str = 'Gargaz/gemma-2b-romanian-better'):
-    pipe = pipeline("text-generation", model="Gargaz/gemma-2b-romanian-better", device = -1, max_new_tokens = 512, do_sample = True, temperature = 0.2)
+def gemma(requestValue: str, model: str = 'Gargaz/gemma-2b-romanian-better'):
+    prompt = f"Translate this to Romanian using a formal tone. Only return the translation:\n{requestValue}"
     messages = [
         {"role": "user", "content": f"Translate this to Romanian using a formal tone. Only return the translated text: {requestValue}"},
     ]
-    return pipe(messages, num_return_sequences=1, return_full_text=False), model
+    pipe = pipeline(
+        "text-generation",
+        model=model,
+        device=-1,
+        max_new_tokens=512,   # cap generation length to reduce verbosity
+        do_sample=False,      # greedy decoding for deterministic output
+        temperature=0.7       # has no effect while do_sample=False
+    )
+
+    output = pipe(prompt, num_return_sequences=1, return_full_text=False)
+    return output[0]["generated_text"].strip(), model
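
For context, a minimal usage sketch of the updated gemma helper, assuming transformers is installed and that the function is importable as src.translate.Translate (an import path inferred from the file listing above, not shown in the diff); the caller below is hypothetical and only mirrors the new return signature:

# Hypothetical caller, not part of this commit.
from src.translate.Translate import gemma  # assumed import path based on the repo layout

translated_text, model_name = gemma("Good morning, how are you today?")
print(model_name)       # 'Gargaz/gemma-2b-romanian-better' unless another model id is passed
print(translated_text)  # the stripped generated_text string returned by the pipeline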