halme committed
Commit f7ea956 · 1 Parent(s): 97a5cad

No CUDA support in hf

Files changed (1)
  app.py  +2 -2
app.py CHANGED
@@ -46,11 +46,11 @@ def respond(message, history: list[tuple[str, str]], system_message, max_tokens,
         tokenize = True,
         add_generation_prompt = True, # Must add for generation
         return_tensors = "pt",
-    ).to("cuda")
+    )
 
     from transformers import TextStreamer
     text_streamer = TextStreamer(tokenizer, skip_prompt = True)
-
+
     yield model.generate(input_ids = inputs, streamer = text_streamer, max_new_tokens = 128,
                          use_cache = True, temperature = 1.5, min_p = 0.1)
 
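
For reference, a common way to keep the same code working on both CUDA hosts and CPU-only Spaces hardware is to pick the device at runtime instead of hard-coding .to("cuda"). The following is a minimal sketch, not part of this commit: the checkpoint name is a placeholder, and the surrounding model/tokenizer setup is assumed rather than taken from app.py; only the apply_chat_template / generate arguments mirror the snippet above.

# Illustrative sketch (assumptions noted in comments), not the actual change in this commit.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, TextStreamer

# Select CUDA when available, otherwise fall back to CPU (e.g. on free HF Spaces).
device = "cuda" if torch.cuda.is_available() else "cpu"

# Placeholder checkpoint name; the real model is defined elsewhere in app.py.
tokenizer = AutoTokenizer.from_pretrained("some/checkpoint")
model = AutoModelForCausalLM.from_pretrained("some/checkpoint").to(device)

messages = [{"role": "user", "content": "Hello!"}]
inputs = tokenizer.apply_chat_template(
    messages,
    tokenize = True,
    add_generation_prompt = True,  # Must add for generation
    return_tensors = "pt",
).to(device)  # move inputs to the same device as the model

text_streamer = TextStreamer(tokenizer, skip_prompt = True)
output = model.generate(
    input_ids = inputs,
    streamer = text_streamer,
    max_new_tokens = 128,
    use_cache = True,
    temperature = 1.5,
    min_p = 0.1,
)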