Closed
Description
@Erland366 @Etherll Hi, I used the Orpheus (3B) Kaggle notebook, and when I executed the cell trainer_stats = trainer.train()
it returned this error:
ValueError Traceback (most recent call last)
<ipython-input-8-3d62c575fcfd> in <cell line: 1>()
----> 1 trainer_stats = trainer.train()
/usr/local/lib/python3.10/dist-packages/transformers/trainer.py in train(self, resume_from_checkpoint, trial, ignore_keys_for_eval, **kwargs)
2243 hf_hub_utils.enable_progress_bars()
2244 else:
-> 2245 return inner_training_loop(
2246 args=args,
2247 resume_from_checkpoint=resume_from_checkpoint,
/usr/local/lib/python3.10/dist-packages/unsloth/models/llama.py in _fast_inner_training_loop(self, batch_size, args, resume_from_checkpoint, trial, ignore_keys_for_eval)
/usr/local/lib/python3.10/dist-packages/unsloth_zoo/loss_utils.py in _unsloth_get_batch_samples(self, epoch_iterator, num_batches, device, *args, **kwargs)
269 for _ in range(num_batches):
270 try:
--> 271 batch_samples += [next(epoch_iterator)]
272 except StopIteration:
273 break
/usr/local/lib/python3.10/dist-packages/accelerate/data_loader.py in __iter__(self)
550 # We iterate one batch ahead to check when we are at the end
551 try:
--> 552 current_batch = next(dataloader_iter)
553 except StopIteration:
554 yield
/usr/local/lib/python3.10/dist-packages/torch/utils/data/dataloader.py in __next__(self)
699 # TODO(https://github.com/pytorch/pytorch/issues/76750)
700 self._reset() # type: ignore[call-arg]
--> 701 data = self._next_data()
702 self._num_yielded += 1
703 if (
/usr/local/lib/python3.10/dist-packages/torch/utils/data/dataloader.py in _next_data(self)
755 def _next_data(self):
756 index = self._next_index() # may raise StopIteration
--> 757 data = self._dataset_fetcher.fetch(index) # may raise StopIteration
758 if self._pin_memory:
759 data = _utils.pin_memory.pin_memory(data, self._pin_memory_device)
/usr/local/lib/python3.10/dist-packages/torch/utils/data/_utils/fetch.py in fetch(self, possibly_batched_index)
53 else:
54 data = self.dataset[possibly_batched_index]
---> 55 return self.collate_fn(data)
/usr/local/lib/python3.10/dist-packages/transformers/data/data_collator.py in default_data_collator(features, return_tensors)
91
92 if return_tensors == "pt":
---> 93 return torch_default_data_collator(features)
94 elif return_tensors == "tf":
95 return tf_default_data_collator(features)
/usr/local/lib/python3.10/dist-packages/transformers/data/data_collator.py in torch_default_data_collator(features)
157 batch[k] = torch.from_numpy(np.stack([f[k] for f in features]))
158 else:
--> 159 batch[k] = torch.tensor([f[k] for f in features])
160
161 return batch
ValueError: expected sequence of length 715 at dim 1 (got 614)
Metadata
Metadata
Assignees
Labels
No labels