Update pytorch_data_loader_with_multiple_workers.py #86

Open · wants to merge 2 commits into main
@@ -37,25 +37,32 @@ def pytorch_data_loader_with_multiple_workers_noncompliant():

# {fact [email protected] defects=0}
def pytorch_data_loader_with_multiple_workers_compliant(args):
    import os
    import torch
    import torchvision.datasets as datasets
    import torchvision.transforms as transforms
    # Data loading code
    traindir = os.path.join(args.data, 'train')
    valdir = os.path.join(args.data, 'val')
    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225])
    # Standard ImageNet training transforms, composed around `normalize`.
    imagenet_transforms = transforms.Compose([
        transforms.RandomResizedCrop(224),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        normalize,
    ])

    train_dataset = datasets.ImageFolder(traindir, imagenet_transforms)
    train_sampler = torch.utils.data.distributed \
        .DistributedSampler(train_dataset)

    # Compliant: args.workers is assigned to num_workers, and a native
    # Python list/dict is not used here to store the dataset.
    train_loader = torch.utils.data.DataLoader(train_dataset,
                                               batch_size=args.batch_size,
                                               shuffle=(train_sampler is None),
                                               num_workers=args.workers,
                                               pin_memory=True,
                                               sampler=train_sampler)
from torch.utils.data import Dataset, DataLoader
import numpy as np
import torch

class DataIter(Dataset):
    def __init__(self):
        # Back the dataset with a single NumPy array instead of a native
        # Python list, so DataLoader worker processes can share it
        # without copy-on-write memory growth.
        self.data_np = np.arange(24000000)

Review comment: can that data be more realistic rather than just numbers? e.g. path to image files in a folder.
(A sketch responding to this follows the diff.)
    def __len__(self):
        return len(self.data_np)

    def __getitem__(self, idx):
        data = self.data_np[idx]
        data = np.array([data], dtype=np.int64)
        return torch.tensor(data)

train_data = DataIter()
# Compliant: a native Python `list/dict` is not used to store the
# dataset, so nonzero `num_workers` is safe here.
train_loader = DataLoader(train_data, batch_size=300,
                          shuffle=True,
                          drop_last=True,
                          pin_memory=False,
                          num_workers=8)

# Exercise the loader; print progress every 1000 batches.
for i, item in enumerate(train_loader):
    if i % 1000 == 0:
        print(i)
# {/fact}
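
Picking up the review comment embedded in the diff above, here is one hedged sketch of what a more realistic dataset could look like: indexing image files in a folder instead of a range of integers. The directory layout, class name, and transforms are illustrative assumptions, not part of this PR. Note that a tuple of path strings is still a collection of Python objects, so for very large indexes a NumPy array of fixed-width strings sidesteps the same copy-on-write concern.

import os

from PIL import Image
from torch.utils.data import Dataset, DataLoader
import torchvision.transforms as transforms

class ImagePathDataset(Dataset):
    def __init__(self, image_dir):
        # Index image files found in `image_dir`. A tuple of strings is
        # used here for simplicity; for huge indexes, a NumPy array of
        # fixed-width strings avoids per-object refcount copy-on-write.
        self.paths = tuple(
            os.path.join(image_dir, name)
            for name in sorted(os.listdir(image_dir))
            if name.lower().endswith(('.jpg', '.jpeg', '.png'))
        )
        self.transform = transforms.Compose([
            transforms.Resize((224, 224)),
            transforms.ToTensor(),
        ])

    def __len__(self):
        return len(self.paths)

    def __getitem__(self, idx):
        with Image.open(self.paths[idx]) as img:
            return self.transform(img.convert('RGB'))

train_loader = DataLoader(ImagePathDataset('/path/to/images'),
                          batch_size=32, shuffle=True, num_workers=8)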
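
For contrast, a minimal sketch of the pattern the rule flags (hypothetical; the file's actual pytorch_data_loader_with_multiple_workers_noncompliant function is not shown in this diff): backing the dataset with a native Python list while num_workers is nonzero. Each worker process updates the reference counts of the list's elements as it reads them, which defeats copy-on-write page sharing and multiplies memory use per worker.

from torch.utils.data import Dataset, DataLoader
import torch

class ListBackedDataset(Dataset):
    def __init__(self):
        # Native Python list: every element is a separate Python object,
        # and worker reads update refcounts, copying the shared pages.
        self.data = list(range(24000000))

    def __len__(self):
        return len(self.data)

    def __getitem__(self, idx):
        return torch.tensor([self.data[idx]], dtype=torch.int64)

# Noncompliant pattern: native list storage plus num_workers > 0.
loader = DataLoader(ListBackedDataset(), batch_size=300, num_workers=8)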
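
Finally, since the ImageFolder version earlier in the diff builds a DistributedSampler, a process group must already be initialized before calling it. A hypothetical single-process invocation, with paths and hyperparameters as placeholder assumptions:

import argparse
import torch.distributed as dist

# Assumed single-process setup; real training would use torchrun/launch.
dist.init_process_group(backend='gloo',
                        init_method='tcp://127.0.0.1:29500',
                        rank=0, world_size=1)
args = argparse.Namespace(data='/path/to/imagenet',
                          batch_size=256, workers=4)
pytorch_data_loader_with_multiple_workers_compliant(args)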