import torch
from torch import Tensor
from typing import Generic, Iterator, List, Optional, Sequence, Sized, TypeVar

T_co = TypeVar('T_co', covariant=True)


class Sampler(Generic[T_co]):
    r"""Base class for all Samplers.

    Every Sampler subclass has to provide an :meth:`__iter__` method, providing a
    way to iterate over indices of dataset elements, and a :meth:`__len__` method
    that returns the length of the returned iterators.

    .. note:: The :meth:`__len__` method isn't strictly required by
              :class:`~torch.utils.data.DataLoader`, but is expected in any
              calculation involving the length of a :class:`~torch.utils.data.DataLoader`.
    """

    def __init__(self, data_source: Optional[Sized]) -> None:
        pass

    def __iter__(self) -> Iterator[T_co]:
        raise NotImplementedError
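# Minimal sketch of a custom subclass (illustrative only, not part of torch):
# a Sampler only needs `__iter__` yielding dataset indices; `__len__` is
# optional but recommended. The name `ReverseSampler` is hypothetical.
class ReverseSampler(Sampler[int]):
    def __init__(self, data_source: Sized) -> None:
        self.data_source = data_source

    def __iter__(self) -> Iterator[int]:
        # yield indices from last to first
        return iter(range(len(self.data_source) - 1, -1, -1))

    def __len__(self) -> int:
        return len(self.data_source)

# Usage: list(ReverseSampler(range(4))) == [3, 2, 1, 0]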
# NOTE [ Lack of Default `__len__` in Python Abstract Base Classes ]
#
# Many times we have an abstract class representing a collection/iterable of
# data, e.g., `torch.utils.data.Sampler`, with its subclasses optionally
# implementing a `__len__` method. In such cases, we must make sure to not
# provide a default implementation, because both straightforward default
# implementations have their issues:
#
#   + `return NotImplemented`:
#     Calling `len(subclass_instance)` raises:
#       TypeError: 'NotImplementedType' object cannot be interpreted as an integer
#
#   + `raise NotImplementedError()`:
#     This prevents triggering some fallback behavior. E.g., the built-in
#     `list(X)` tries to call `len(X)` first, and executes a different code
#     path if the method is not found or `NotImplemented` is returned, while
#     raising a `NotImplementedError` will propagate and make the call fail
#     where it could have used `__iter__` to complete the call.
#
# Thus, the only two sensible things to do are
#
#   + **not** provide a default `__len__`.
#
#   + raise a `TypeError` instead, which is what Python uses when users call
#     a method that is not defined on an object.
#     (@ssnl verifies that this works on at least Python 3.7.)
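# Illustrative sketch of the point above (not part of torch): `list()` asks the
# object for a length before falling back to plain iteration, and only a
# missing `__len__` (or a TypeError from `len()`) lets that fallback run.
# The class names below are hypothetical.
class _IterOnly:
    def __iter__(self):
        return iter([0, 1, 2])

class _BadLen(_IterOnly):
    def __len__(self):
        raise NotImplementedError

# list(_IterOnly())  == [0, 1, 2]          # no __len__, list() uses __iter__
# list(_BadLen())    raises NotImplementedError  # the error propagates instead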
class SequentialSampler(Sampler[int]):
    r"""Samples elements sequentially, always in the same order.

    Args:
        data_source (Dataset): dataset to sample from
    """
    data_source: Sized

    def __init__(self, data_source: Sized) -> None:
        self.data_source = data_source

    def __iter__(self) -> Iterator[int]:
        return iter(range(len(self.data_source)))

    def __len__(self) -> int:
        return len(self.data_source)
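# Usage sketch (not part of the module): SequentialSampler simply yields
# 0 .. len(data_source) - 1 in order; DataLoader builds one internally when
# ``shuffle=False`` and no sampler is given.
#
#   >>> list(SequentialSampler(range(5)))
#   [0, 1, 2, 3, 4]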
class RandomSampler(Sampler[int]):
    r"""Samples elements randomly. If without replacement, then sample from a shuffled dataset.
    If with replacement, then user can specify :attr:`num_samples` to draw.

    Args:
        data_source (Dataset): dataset to sample from
        replacement (bool): samples are drawn on-demand with replacement if ``True``, default=``False``
        num_samples (int): number of samples to draw, default=`len(dataset)`. This argument
            is supposed to be specified only when `replacement` is ``True``.
        generator (Generator): Generator used in sampling.
    """
    data_source: Sized
    replacement: bool

    def __init__(self, data_source: Sized, replacement: bool = False,
                 num_samples: Optional[int] = None, generator=None) -> None:
        self.data_source = data_source
        self.replacement = replacement
        self._num_samples = num_samples
        self.generator = generator

        if not isinstance(self.replacement, bool):
            raise TypeError("replacement should be a boolean value, but got "
                            "replacement={}".format(self.replacement))

        if self._num_samples is not None and not replacement:
            raise ValueError("With replacement=False, num_samples should not be specified, "
                             "since a random permute will be performed.")

        if not isinstance(self.num_samples, int) or self.num_samples <= 0:
            raise ValueError("num_samples should be a positive integer "
                             "value, but got num_samples={}".format(self.num_samples))

    @property
    def num_samples(self) -> int:
        # dataset size might change at runtime
        if self._num_samples is None:
            return len(self.data_source)
        return self._num_samples

    def __iter__(self) -> Iterator[int]:
        n = len(self.data_source)
        if self.generator is None:
            seed = int(torch.empty((), dtype=torch.int64).random_().item())
            generator = torch.Generator()
            generator.manual_seed(seed)
        else:
            generator = self.generator
        if self.replacement:
            for _ in range(self.num_samples // 32):
                yield from torch.randint(high=n, size=(32,), dtype=torch.int64, generator=generator).tolist()
            yield from torch.randint(high=n, size=(self.num_samples % 32,), dtype=torch.int64, generator=generator).tolist()
        else:
            yield from torch.randperm(n, generator=generator).tolist()

    def __len__(self) -> int:
        return self.num_samples
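# Usage sketch (not part of the module): with ``replacement=False`` (the
# default) RandomSampler yields a permutation of 0 .. len(data_source) - 1;
# passing a seeded ``torch.Generator`` makes the order reproducible.
#
#   >>> g = torch.Generator()
#   >>> _ = g.manual_seed(0)
#   >>> indices = list(RandomSampler(range(5), generator=g))
#   >>> sorted(indices)
#   [0, 1, 2, 3, 4]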
class SubsetRandomSampler(Sampler[int]):
    r"""Samples elements randomly from a given list of indices, without replacement.

    Args:
        indices (sequence): a sequence of indices
        generator (Generator): Generator used in sampling.
    """
    indices: Sequence[int]

    def __init__(self, indices: Sequence[int], generator=None) -> None:
        self.indices = indices
        self.generator = generator

    def __iter__(self) -> Iterator[int]:
        for i in torch.randperm(len(self.indices), generator=self.generator):
            yield self.indices[i]

    def __len__(self) -> int:
        return len(self.indices)
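# Usage sketch (not part of the module): a common pattern is to split one
# dataset's index range into disjoint subsets (e.g. train/val) and give each
# its own SubsetRandomSampler. The variable names below are illustrative only.
#
#   >>> split = 8
#   >>> train_sampler = SubsetRandomSampler(list(range(split)))
#   >>> val_sampler = SubsetRandomSampler(list(range(split, 10)))
#   >>> sorted(train_sampler)
#   [0, 1, 2, 3, 4, 5, 6, 7]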
class WeightedRandomSampler(Sampler[int]):
    r"""Samples elements from ``[0,..,len(weights)-1]`` with given probabilities (weights).

    Args:
        weights (sequence): a sequence of weights, not necessarily summing up to one
        num_samples (int): number of samples to draw
        replacement (bool): if ``True``, samples are drawn with replacement.
            If not, they are drawn without replacement, which means that when a
            sample index is drawn for a row, it cannot be drawn again for that row.
        generator (Generator): Generator used in sampling.

    Example:
        >>> list(WeightedRandomSampler([0.1, 0.9, 0.4, 0.7, 3.0, 0.6], 5, replacement=True))
        [4, 4, 1, 4, 5]
        >>> list(WeightedRandomSampler([0.9, 0.4, 0.05, 0.2, 0.3, 0.1], 5, replacement=False))
        [0, 1, 4, 3, 2]
    """
    weights: Tensor
    num_samples: int
    replacement: bool

    def __init__(self, weights: Sequence[float], num_samples: int,
                 replacement: bool = True, generator=None) -> None:
        if not isinstance(num_samples, int) or isinstance(num_samples, bool) or \
                num_samples <= 0:
            raise ValueError("num_samples should be a positive integer "
                             "value, but got num_samples={}".format(num_samples))
        if not isinstance(replacement, bool):
            raise ValueError("replacement should be a boolean value, but got "
                             "replacement={}".format(replacement))
        self.weights = torch.as_tensor(weights, dtype=torch.double)
        self.num_samples = num_samples
        self.replacement = replacement
        self.generator = generator

    def __iter__(self) -> Iterator[int]:
        rand_tensor = torch.multinomial(self.weights, self.num_samples, self.replacement,
                                        generator=self.generator)
        yield from iter(rand_tensor.tolist())

    def __len__(self) -> int:
        return self.num_samples
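# Usage sketch (not part of the module): for class-imbalanced data, a common
# recipe is to weight each sample inversely to its class frequency and pass the
# sampler to DataLoader via ``sampler=``. The variable names below are
# illustrative only.
#
#   >>> labels = [0, 0, 0, 0, 1]                        # imbalanced toy labels
#   >>> class_counts = torch.bincount(torch.tensor(labels)).double()
#   >>> sample_weights = (1.0 / class_counts)[labels]   # per-sample weights
#   >>> sampler = WeightedRandomSampler(sample_weights, num_samples=len(labels))
#   >>> # loader = DataLoader(dataset, batch_size=2, sampler=sampler)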
class BatchSampler(Sampler[List[int]]):
    r"""Wraps another sampler to yield a mini-batch of indices.

    Args:
        sampler (Sampler or Iterable): Base sampler. Can be any iterable object
        batch_size (int): Size of mini-batch.
        drop_last (bool): If ``True``, the sampler will drop the last batch if
            its size would be less than ``batch_size``

    Example:
        >>> list(BatchSampler(SequentialSampler(range(10)), batch_size=3, drop_last=False))
        [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]
        >>> list(BatchSampler(SequentialSampler(range(10)), batch_size=3, drop_last=True))
        [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
    """

    def __init__(self, sampler: Sampler[int], batch_size: int, drop_last: bool) -> None:
        # Since collections.abc.Iterable does not check for `__getitem__`, which
        # is one way for an object to be an iterable, we don't do an `isinstance`
        # check here.
        if not isinstance(batch_size, int) or isinstance(batch_size, bool) or \
                batch_size <= 0:
            raise ValueError("batch_size should be a positive integer value, "
                             "but got batch_size={}".format(batch_size))
        if not isinstance(drop_last, bool):
            raise ValueError("drop_last should be a boolean value, but got "
                             "drop_last={}".format(drop_last))
        self.sampler = sampler
        self.batch_size = batch_size
        self.drop_last = drop_last

    def __iter__(self) -> Iterator[List[int]]:
        batch = []
        for idx in self.sampler:
            batch.append(idx)
            if len(batch) == self.batch_size:
                yield batch
                batch = []
        if len(batch) > 0 and not self.drop_last:
            yield batch

    def __len__(self) -> int:
        # Can only be called if self.sampler has __len__ implemented
        # We cannot enforce this condition, so we turn off typechecking for the
        # implementation below.
        # Somewhat related: see NOTE [ Lack of Default `__len__` in Python Abstract Base Classes ]
        if self.drop_last:
            return len(self.sampler) // self.batch_size  # type: ignore[arg-type]
        else:
            return (len(self.sampler) + self.batch_size - 1) // self.batch_size  # type: ignore[arg-type]
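# Usage sketch (not part of the module): a BatchSampler can be handed to
# DataLoader via ``batch_sampler=``, in which case ``batch_size``, ``shuffle``,
# ``sampler`` and ``drop_last`` must be left at their defaults. The names below
# are illustrative only.
#
#   >>> batch_sampler = BatchSampler(RandomSampler(range(10)), batch_size=4, drop_last=True)
#   >>> # loader = DataLoader(dataset, batch_sampler=batch_sampler)
#   >>> [len(b) for b in batch_sampler]
#   [4, 4]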