@AdityaKane2001
Created September 18, 2022 13:36
---------------------------------------------------------------------------
IndexError                                Traceback (most recent call last)
/tmp/ipykernel_17/2958370653.py in <module>
      1 for DATASET in POSSIBLE_DATASETS:
      2     for MODEL_ALIAS in POSSIBLE_MODEL_ALIASES:
----> 3         train_eval_test(MODEL_ALIAS, DATASET)
/tmp/ipykernel_17/1094019785.py in train_eval_test(MODEL_ALIAS, DATASET)
     35     model.train()
     36     for batch in train_dl:
---> 37         outputs = model(batch)
     38 
     39         loss = loss_fn(outputs, batch[1].to(device))
/opt/conda/lib/python3.7/site-packages/torch/nn/modules/module.py in _call_impl(self, *input, **kwargs)
   1108         if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks
   1109                 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1110             return forward_call(*input, **kwargs)
   1111         # Do not call functions when jit is used
   1112         full_backward_hooks, non_full_backward_hooks = [], []
/tmp/ipykernel_17/3524776035.py in forward(self, batch)
     21             k: v.to(self.device) for k, v in tokenized.items()
     22         }
---> 23         x = self.backbone(**tokenized)
     24         x = self.lin(x.pooler_output)
     25         return x
/opt/conda/lib/python3.7/site-packages/torch/nn/modules/module.py in _call_impl(self, *input, **kwargs)
   1108         if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks
   1109                 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1110             return forward_call(*input, **kwargs)
   1111         # Do not call functions when jit is used
   1112         full_backward_hooks, non_full_backward_hooks = [], []
/opt/conda/lib/python3.7/site-packages/transformers/models/bert/modeling_bert.py in forward(self, input_ids, attention_mask, token_type_ids, position_ids, head_mask, inputs_embeds, encoder_hidden_states, encoder_attention_mask, past_key_values, use_cache, output_attentions, output_hidden_states, return_dict)
   1014             token_type_ids=token_type_ids,
   1015             inputs_embeds=inputs_embeds,
-> 1016             past_key_values_length=past_key_values_length,
   1017         )
   1018         encoder_outputs = self.encoder(
/opt/conda/lib/python3.7/site-packages/torch/nn/modules/module.py in _call_impl(self, *input, **kwargs)
   1108         if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks
   1109                 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1110             return forward_call(*input, **kwargs)
   1111         # Do not call functions when jit is used
   1112         full_backward_hooks, non_full_backward_hooks = [], []
/opt/conda/lib/python3.7/site-packages/transformers/models/bert/modeling_bert.py in forward(self, input_ids, token_type_ids, position_ids, inputs_embeds, past_key_values_length)
    233 
    234         if inputs_embeds is None:
--> 235             inputs_embeds = self.word_embeddings(input_ids)
    236         token_type_embeddings = self.token_type_embeddings(token_type_ids)
    237 
/opt/conda/lib/python3.7/site-packages/torch/nn/modules/module.py in _call_impl(self, *input, **kwargs)
   1108         if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks
   1109                 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1110             return forward_call(*input, **kwargs)
   1111         # Do not call functions when jit is used
   1112         full_backward_hooks, non_full_backward_hooks = [], []
/opt/conda/lib/python3.7/site-packages/torch/nn/modules/sparse.py in forward(self, input)
    158         return F.embedding(
    159             input, self.weight, self.padding_idx, self.max_norm,
--> 160             self.norm_type, self.scale_grad_by_freq, self.sparse)
    161 
    162     def extra_repr(self) -> str:
/opt/conda/lib/python3.7/site-packages/torch/nn/functional.py in embedding(input, weight, padding_idx, max_norm, norm_type, scale_grad_by_freq, sparse)
   2181         # remove once script supports set_grad_enabled
   2182         _no_grad_embedding_renorm_(weight, input, max_norm, norm_type)
-> 2183     return torch.embedding(weight, input, padding_idx, scale_grad_by_freq, sparse)
   2184 
   2185 
IndexError: index out of range in self
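
For context on the failure: torch.embedding raises "IndexError: index out of range in self" whenever any input id falls outside [0, num_embeddings) of the embedding table. Since the failing frame is self.word_embeddings(input_ids), the token ids themselves are out of range, not the sequence length or position ids. A minimal sketch of the failure mode, assuming nothing about the notebook beyond the traceback (30522 is the bert-base-uncased vocab size; any nn.Embedding behaves the same):

import torch
import torch.nn as nn

# An embedding table only accepts ids in [0, num_embeddings).
emb = nn.Embedding(num_embeddings=30522, embedding_dim=768)

ok = torch.tensor([[101, 2023, 102]])    # all ids < 30522
print(emb(ok).shape)                     # torch.Size([1, 3, 768])

bad = torch.tensor([[101, 30522, 102]])  # 30522 is one past the last row
try:
    emb(bad)
except IndexError as err:
    print(err)                           # index out of range in self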
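
With transformers models, the usual way this happens is a tokenizer whose effective vocabulary is larger than the checkpoint's embedding table: a tokenizer paired with the wrong model across the POSSIBLE_MODEL_ALIASES loop, or special tokens added to the tokenizer without resizing the model. A diagnostic-and-fix sketch, with "bert-base-uncased" standing in for whatever alias the loop selects:

from transformers import AutoModel, AutoTokenizer

alias = "bert-base-uncased"  # placeholder for one of POSSIBLE_MODEL_ALIASES

tokenizer = AutoTokenizer.from_pretrained(alias)
model = AutoModel.from_pretrained(alias)

vocab_size = len(tokenizer)  # counts added special tokens too
table_rows = model.get_input_embeddings().num_embeddings
print(vocab_size, table_rows)

# If the tokenizer can emit ids the table lacks, grow the table so every
# id has a row; otherwise pair the tokenizer with its own checkpoint.
if vocab_size > table_rows:
    model.resize_token_embeddings(vocab_size)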