update app
app.py CHANGED
@@ -109,7 +109,7 @@ css = """
 """
 
 MAX_MAX_NEW_TOKENS = 4096
-DEFAULT_MAX_NEW_TOKENS =
+DEFAULT_MAX_NEW_TOKENS = 1024
 MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "4096"))
 
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
@@ -223,9 +223,7 @@ def generate_image(model_name: str, text: str, image: Image.Image,
     messages = [{"role": "user", "content": [{"type": "image"}, {"type": "text", "text": text}]}]
     prompt_full = processor.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
     inputs = processor(
-        text=[prompt_full], images=[image], return_tensors="pt", padding=True
-        truncation=True, max_length=MAX_INPUT_TOKEN_LENGTH
-    ).to(device)
+        text=[prompt_full], images=[image], return_tensors="pt", padding=True).to(device)
     streamer = TextIteratorStreamer(processor, skip_prompt=True, skip_special_tokens=True)
     generation_kwargs = {**inputs, "streamer": streamer, "max_new_tokens": max_new_tokens}
     thread = Thread(target=model.generate, kwargs=generation_kwargs)
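For context, a minimal sketch of how the changed lines read after this commit inside a streaming generation function. The imports, checkpoint name, and the stream_caption wrapper are illustrative assumptions and are not part of the commit; only the processor call, the streamer, and the background generation thread mirror the diff above.

# Minimal sketch (not from the commit) of the post-change flow, assuming a
# vision-language checkpoint that works with AutoProcessor / AutoModelForVision2Seq.
from threading import Thread

import torch
from PIL import Image
from transformers import AutoModelForVision2Seq, AutoProcessor, TextIteratorStreamer

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model_id = "HuggingFaceM4/idefics2-8b"  # hypothetical checkpoint, for illustration only
processor = AutoProcessor.from_pretrained(model_id)
model = AutoModelForVision2Seq.from_pretrained(model_id, torch_dtype=torch.float16).to(device)

def stream_caption(text: str, image: Image.Image, max_new_tokens: int = 1024):
    # Build a chat-style prompt with one image placeholder plus the user text.
    messages = [{"role": "user", "content": [{"type": "image"}, {"type": "text", "text": text}]}]
    prompt_full = processor.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
    # As of this commit, inputs are padded but no longer truncated.
    inputs = processor(
        text=[prompt_full], images=[image], return_tensors="pt", padding=True).to(device)
    # Stream tokens from a background generate() call so the UI can update incrementally.
    streamer = TextIteratorStreamer(processor, skip_prompt=True, skip_special_tokens=True)
    generation_kwargs = {**inputs, "streamer": streamer, "max_new_tokens": max_new_tokens}
    Thread(target=model.generate, kwargs=generation_kwargs).start()
    output = ""
    for chunk in streamer:
        output += chunk
        yield output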