I'm getting an error on the last block ("Expected all tensors to be on the same device" — the input tensor is on CPU while the encoder's weights are on `cuda:0`); not sure if this is related to nlp-with-transformers/notebooks#31.
`---------------------------------------------------------------------------
RuntimeError Traceback (most recent call last)
Untitled-1.ipynb Cell 5' in <cell line: 2>()
1 import torch.nn.functional as F
----> 2 z_logits = enc(x)
3 z = torch.argmax(z_logits, axis=1)
4 z = F.one_hot(z, num_classes=enc.vocab_size).permute(0, 3, 1, 2).float()
File c:\Users\aaken.venv\lib\site-packages\torch\nn\modules\module.py:1110, in Module._call_impl(self, *input, **kwargs)
1106 # If we don't have any hooks, we want to skip the rest of the logic in
1107 # this function, and just call forward.
1108 if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks
1109 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1110 return forward_call(*input, **kwargs)
1111 # Do not call functions when jit is used
1112 full_backward_hooks, non_full_backward_hooks = [], []
File c:\Users\aaken.venv\lib\site-packages\dall_e\encoder.py:93, in Encoder.forward(self, x)
90 if x.dtype != torch.float32:
91 raise ValueError('input must have dtype torch.float32')
---> 93 return self.blocks(x)
File c:\Users\aaken.venv\lib\site-packages\torch\nn\modules\module.py:1110, in Module._call_impl(self, *input, **kwargs)
1106 # If we don't have any hooks, we want to skip the rest of the logic in
1107 # this function, and just call forward.
1108 if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks
1109 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1110 return forward_call(*input, **kwargs)
1111 # Do not call functions when jit is used
1112 full_backward_hooks, non_full_backward_hooks = [], []
File c:\Users\aaken.venv\lib\site-packages\torch\nn\modules\container.py:141, in Sequential.forward(self, input)
139 def forward(self, input):
140 for module in self:
--> 141 input = module(input)
142 return input
File c:\Users\aaken.venv\lib\site-packages\torch\nn\modules\module.py:1110, in Module._call_impl(self, *input, **kwargs)
1106 # If we don't have any hooks, we want to skip the rest of the logic in
1107 # this function, and just call forward.
1108 if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks
1109 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1110 return forward_call(*input, **kwargs)
1111 # Do not call functions when jit is used
1112 full_backward_hooks, non_full_backward_hooks = [], []
File c:\Users\aaken.venv\lib\site-packages\dall_e\utils.py:43, in Conv2d.forward(self, x)
39 x = x.float()
41 w, b = self.w, self.b
---> 43 return F.conv2d(x, w, b, padding=(self.kw - 1) // 2)
RuntimeError: Expected all tensors to be on the same device, but found at least two devices, cpu and cuda:0! (when checking argument for argument weight in method wrapper___slow_conv2d_forward)`