@ikanez
Created February 5, 2018 19:19
---------------------------------------------------------------------------
RuntimeError Traceback (most recent call last)
<ipython-input-37-16682764c468> in <module>()
----> 1 fit(m, md, 1, lo.opt, F.binary_cross_entropy)
2 # use F.binary_cross_entropy for multi-label problems
~\Dropbox\3.SelfStudy\fastai_pytorch\fastai\courses\dl1\fastai\model.py in fit(model, data, epochs, opt, crit, metrics, callbacks, **kwargs)
104 i += 1
105
--> 106 vals = validate(stepper, data.val_dl, metrics)
107 if epoch == 0: print(layout.format(*names))
108 print_stats(epoch, [debias_loss] + vals)
~\Dropbox\3.SelfStudy\fastai_pytorch\fastai\courses\dl1\fastai\model.py in validate(stepper, dl, metrics)
123 loss,res = [],[]
124 stepper.reset(False)
--> 125 for (*x,y) in iter(dl):
126 preds,l = stepper.evaluate(VV(x), VV(y))
127 loss.append(to_np(l))
~\Dropbox\3.SelfStudy\fastai_pytorch\fastai\courses\dl1\fastai\dataset.py in __next__(self)
241 if self.i>=len(self.dl): raise StopIteration
242 self.i+=1
--> 243 return next(self.it)
244
245 @property
<ipython-input-20-c08f5408c833> in __iter__(self)
49 it = iter(self.src)
50 for i in range(len(self)):
---> 51 b = next(it)
52
53 if (len(self.y_flds) > 1):
~\Dropbox\3.SelfStudy\ml_lab\Anaconda3\envs\fastai\lib\site-packages\torchtext\data\iterator.py in __iter__(self)
178 minibatch.sort(key=self.sort_key, reverse=True)
179 yield Batch(minibatch, self.dataset, self.device,
--> 180 self.train)
181 if not self.repeat:
182 raise StopIteration
~\Dropbox\3.SelfStudy\ml_lab\Anaconda3\envs\fastai\lib\site-packages\torchtext\data\batch.py in __init__(self, data, dataset, device, train)
20 if field is not None:
21 batch = [x.__dict__[name] for x in data]
---> 22 setattr(self, name, field.process(batch, device=device, train=train))
23
24 @classmethod
~\Dropbox\3.SelfStudy\ml_lab\Anaconda3\envs\fastai\lib\site-packages\torchtext\data\field.py in process(self, batch, device, train)
185 """
186 padded = self.pad(batch)
--> 187 tensor = self.numericalize(padded, device=device, train=train)
188 return tensor
189
~\Dropbox\3.SelfStudy\ml_lab\Anaconda3\envs\fastai\lib\site-packages\torchtext\data\field.py in numericalize(self, arr, device, train)
307 arr = self.postprocessing(arr, None, train)
308
--> 309 arr = self.tensor_type(arr)
310 if self.sequential and not self.batch_first:
311 arr.t_()
RuntimeError: tried to construct a tensor from a int sequence, but found an item of type float at index (0)
ikanez commented Mar 3, 2018

Update on the issue: a probable solution, as mentioned by a commenter there (facebookresearch/InferSent#2):

When you define a Field in torchtext, it defaults to torch.LongTensor, so you can pass whatever type you want, e.g. data.Field(sequential=False, use_vocab=False, tensor_type=torch.cuda.FloatTensor).
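That matches the failing frame in the traceback: Field.numericalize calls self.tensor_type(arr), and with the default LongTensor the float-valued multi-label targets cannot be converted. A minimal sketch of the same failure outside torchtext, assuming the PyTorch 0.3-era legacy constructors used in the traceback (the label values here are hypothetical):

import torch

# Hypothetical multi-label targets: one row of 0.0/1.0 floats per example.
float_labels = [[0.0, 1.0, 0.0], [1.0, 0.0, 1.0]]

ok = torch.FloatTensor(float_labels)   # fine: float items into a float tensor
bad = torch.LongTensor(float_labels)   # on PyTorch 0.3-era builds this raises the same
                                       # "tried to construct a tensor from a int sequence,
                                       # but found an item of type float" RuntimeError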

Sure enough, when I changed my code accordingly (below), the error no longer appears:
tt_TEXT = data.Field(sequential=True, tokenize=tokenizer, fix_length=max_len)
tt_LABEL = data.Field(sequential=False, use_vocab=False, tensor_type=torch.cuda.FloatTensor)
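For completeness, a rough sketch of how those two fields might be wired into a torchtext dataset and iterator before being handed to fit(). Only tt_TEXT, tt_LABEL, tokenizer, and max_len correspond to the code above; the CSV path, column names, batch size, and the use of TabularDataset/BucketIterator are illustrative assumptions, not the exact setup from this gist.

import torch
from torchtext import data

tokenizer = str.split   # stand-in; the gist uses its own tokenizer
max_len = 100           # stand-in for the gist's fix_length value

tt_TEXT = data.Field(sequential=True, tokenize=tokenizer, fix_length=max_len)
# tensor_type=torch.cuda.FloatTensor keeps the labels as floats, which is what
# F.binary_cross_entropy expects; use torch.FloatTensor when running on CPU.
tt_LABEL = data.Field(sequential=False, use_vocab=False, tensor_type=torch.cuda.FloatTensor)

# Hypothetical CSV with a text column and a float-valued label column.
train_ds = data.TabularDataset(path='train.csv', format='csv',
                               fields=[('text', tt_TEXT), ('label', tt_LABEL)])

tt_TEXT.build_vocab(train_ds)

# Batches from this iterator now carry FloatTensor labels instead of the
# default LongTensor, so Field.numericalize no longer raises the RuntimeError.
train_iter = data.BucketIterator(train_ds, batch_size=64,
                                 sort_key=lambda ex: len(ex.text))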
