Commit · 207c30a
Parent(s): 79aabbe
fr
app.py CHANGED
@@ -61,8 +61,8 @@ def model_inference(
     text = f"{assistant_prefix} {text}"
 
 
-    prompt =
-    inputs =
+    prompt = id_processor.apply_chat_template(resulting_messages, add_generation_prompt=True)
+    inputs = id_processor(text=prompt, images=[images], return_tensors="pt")
     inputs = {k: v.to("cuda") for k, v in inputs.items()}
 
     generation_args = {
@@ -86,9 +86,9 @@ def model_inference(
     generation_args.update(inputs)
 
     # Generate
-    generated_ids =
+    generated_ids = id_model.generate(**generation_args)
 
-    generated_texts =
+    generated_texts = id_processor.batch_decode(generated_ids[:, generation_args["input_ids"].size(1):], skip_special_tokens=True)
     return generated_texts[0]
 
 # Load model
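For context, a minimal standalone sketch of the inference path this commit patches. It assumes id_processor and id_model come from a Transformers vision-language checkpoint loaded with AutoProcessor / AutoModelForVision2Seq; the checkpoint name (HuggingFaceM4/idefics2-8b), the example message, and the max_new_tokens value are placeholders and are not taken from this commit.

# Sketch of the patched inference path; checkpoint and settings are assumptions.
from transformers import AutoProcessor, AutoModelForVision2Seq
from PIL import Image
import torch

id_processor = AutoProcessor.from_pretrained("HuggingFaceM4/idefics2-8b")
id_model = AutoModelForVision2Seq.from_pretrained(
    "HuggingFaceM4/idefics2-8b", torch_dtype=torch.bfloat16
).to("cuda")

# Chat-style messages as expected by apply_chat_template (placeholder content).
images = [Image.open("example.jpg")]
resulting_messages = [
    {"role": "user",
     "content": [{"type": "image"}, {"type": "text", "text": "Describe this image."}]}
]

# Build the prompt and tensor inputs (new lines 64-66 of app.py).
prompt = id_processor.apply_chat_template(resulting_messages, add_generation_prompt=True)
inputs = id_processor(text=prompt, images=[images], return_tensors="pt")
inputs = {k: v.to("cuda") for k, v in inputs.items()}

generation_args = {"max_new_tokens": 512}  # placeholder decoding settings
generation_args.update(inputs)

# Generate, then decode only the newly produced tokens (new lines 89-91 of app.py).
generated_ids = id_model.generate(**generation_args)
generated_texts = id_processor.batch_decode(
    generated_ids[:, generation_args["input_ids"].size(1):],
    skip_special_tokens=True,
)
print(generated_texts[0])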