Akbartus committed on
Commit
64115b7
·
1 Parent(s): 8d2ad89

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -4
app.py CHANGED
@@ -23,18 +23,23 @@ def shot(image, labels_text):
23
  labels = labels_text.split(",")
24
 
25
 
26
-
 
 
 
 
 
27
 
28
  res = pipe(images=PIL_image,
29
  candidate_labels=labels,
30
  hypothesis_template= "This is a photo of a {}")
31
- return {dic["label"]: dic["score"] for dic in res}
32
- # Translate
33
  tokenizer.src_lang = "en"
34
  encodedText = tokenizer(label, return_tensors="pt")
35
  generatedTokens = model.generate(**encodedText, forced_bos_token_id=tokenizer.get_lang_id("ru"))
36
  return tokenizer.batch_decode(generatedTokens, skip_special_tokens=True)[0]
37
-
 
38
 
39
  iface = gr.Interface(shot,
40
  ["image", "text"],
 
23
  labels = labels_text.split(",")
24
 
25
 
26
+ # Translate
27
+ tokenizer.src_lang = "en"
28
+ encodedText = tokenizer(label, return_tensors="pt")
29
+ generatedTokens = model.generate(**encodedText, forced_bos_token_id=tokenizer.get_lang_id("ru"))
30
+ return tokenizer.batch_decode(generatedTokens, skip_special_tokens=True)[0]
31
+
32
 
33
  res = pipe(images=PIL_image,
34
  candidate_labels=labels,
35
  hypothesis_template= "This is a photo of a {}")
36
+ return {dic["label"]: dic["score"] for dic in res
 
37
  tokenizer.src_lang = "en"
38
  encodedText = tokenizer(label, return_tensors="pt")
39
  generatedTokens = model.generate(**encodedText, forced_bos_token_id=tokenizer.get_lang_id("ru"))
40
  return tokenizer.batch_decode(generatedTokens, skip_special_tokens=True)[0]
41
+ }
42
+
43
 
44
  iface = gr.Interface(shot,
45
  ["image", "text"],