runtime error
Exit code: 1. Reason:
  h/utils/_contextlib.py", line 120, in decorate_context
    return func(*args, **kwargs)
  File "/usr/local/lib/python3.10/site-packages/sentence_transformers/SentenceTransformer.py", line 1094, in encode
    out_features = self.forward(features, **kwargs)
  File "/usr/local/lib/python3.10/site-packages/sentence_transformers/SentenceTransformer.py", line 1175, in forward
    input = module(input, **module_kwargs)
  File "/usr/local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1773, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File "/usr/local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1784, in _call_impl
    return forward_call(*args, **kwargs)
  File "/usr/local/lib/python3.10/site-packages/sentence_transformers/models/Transformer.py", line 261, in forward
    outputs = self.auto_model(**trans_features, **kwargs, return_dict=True)
  File "/usr/local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1773, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File "/usr/local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1784, in _call_impl
    return forward_call(*args, **kwargs)
  File "/usr/local/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 966, in forward
    extended_attention_mask = _prepare_4d_attention_mask_for_sdpa(
  File "/usr/local/lib/python3.10/site-packages/transformers/modeling_attn_mask_utils.py", line 457, in _prepare_4d_attention_mask_for_sdpa
    return AttentionMaskConverter._expand_mask(mask=mask, dtype=dtype, tgt_len=tgt_len)
  File "/usr/local/lib/python3.10/site-packages/transformers/modeling_attn_mask_utils.py", line 196, in _expand_mask
    inverted_mask = torch.tensor(1.0, dtype=dtype) - expanded_mask
  File "/usr/local/lib/python3.10/site-packages/spaces/zero/torch/patching.py", line 227, in __torch_function__
    raise RuntimeError(
RuntimeError: Expected all tensors to be on the same device, but found at least two devices, cuda:0 (ZeroGPU) and cpu!
Container logs:
Fetching error logs...