1 file changed, +5 -0 lines changed
@@ -11,6 +11,7 @@
 from torch import Tensor
 from torch.utils.data import DataLoader
 
+from datasets import is_caching_enabled
 from pytorch_ie.core.document import Document
 from pytorch_ie.core.model import PyTorchIEModel
 from pytorch_ie.core.taskmodule import (
@@ -390,6 +391,10 @@ def __call__(
                 batched=True,
                 **dataset_map_params,
             )
+            # For now, we do not allow caching of pipeline results since fingerprinting may be incorrect
+            # TODO: elaborate why it may be incorrect
+            if is_caching_enabled() and documents._fingerprint == processed_documents._fingerprint:
+                raise Exception("Caching is not allowed for pipeline calls")
         else:
             processed_documents = self._process_documents(
                 documents=documents,
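
Note on the new guard: `datasets` normally derives the fingerprint of a mapped dataset from the input fingerprint plus a hash of the transform, and uses that fingerprint as the cache key, so a `.map()` call is expected to change it. The check above therefore treats an unchanged fingerprint, while caching is enabled, as a sign that cached results could be wrong. A minimal sketch of the expected behaviour (assumptions: standard `datasets` fingerprinting semantics; `_fingerprint` is the same private attribute referenced in the diff):

    # Illustration only: a hashable transform is expected to yield a new fingerprint.
    from datasets import Dataset

    ds = Dataset.from_dict({"text": ["a", "b"]})
    mapped = ds.map(lambda batch: {"text": [t.upper() for t in batch["text"]]}, batched=True)

    # .map() derives the output fingerprint from the input fingerprint plus a hash
    # of the transform, so the two are expected to differ:
    assert ds._fingerprint != mapped._fingerprint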
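
Because the exception is only raised while `datasets` caching is enabled, callers who hit it can disable caching around the pipeline call. A sketch under the assumptions that `set_caching_enabled` is exported by the same `datasets` version that provides `is_caching_enabled`, and that `pipeline` is an already-constructed pipeline object (both the function name and arguments below are illustrative):

    from datasets import is_caching_enabled, set_caching_enabled

    def call_pipeline_uncached(pipeline, documents, **kwargs):
        """Run a pipeline call with `datasets` caching temporarily disabled."""
        previous = is_caching_enabled()
        set_caching_enabled(False)
        try:
            return pipeline(documents, **kwargs)
        finally:
            # Restore whatever caching state was active before the call.
            set_caching_enabled(previous)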