import warnings
from collections import OrderedDict, abc as container_abcs
from itertools import islice
import operator

import torch
from .module import Module
from torch._jit_internal import _copy_to_script_wrapper

from typing import Any, Dict, Iterable, Iterator, Mapping, Optional, TYPE_CHECKING, overload, Tuple, TypeVar, Union

if TYPE_CHECKING:
    from torch.nn import Parameter

# Type variable bound to Module; used by the container __getitem__ overloads
# so that slicing a subclass returns that same subclass type.
T = TypeVar('T', bound=Module)


class Container(Module):
    r"""Deprecated alias kept for backward compatibility.

    Registers every keyword argument as a named submodule and emits a
    deprecation warning. All of its functionality lives in
    :class:`~torch.nn.Module`; subclass that directly instead.

    Args:
        **kwargs: submodules to register, keyed by attribute name.
    """

    def __init__(self, **kwargs: Any) -> None:
        super(Container, self).__init__()
        # DeprecationWarning is ignored by default <sigh>
        warnings.warn("nn.Container is deprecated. All of its functionality "
                      "is now implemented in nn.Module. Subclass that instead.")
        for key, value in kwargs.items():
            self.add_module(key, value)
class Sequential(Module):
    r"""A sequential container.

    Modules are added to it in the order they are passed in the constructor.
    Alternatively, an ``OrderedDict`` of modules can be passed in. The
    ``forward()`` method of ``Sequential`` accepts any input and forwards it
    to the first module it contains. It then "chains" outputs to inputs
    sequentially for each subsequent module, finally returning the output of
    the last module.

    The value a ``Sequential`` provides over manually calling a sequence of
    modules is that it allows treating the whole container as a single
    module, such that performing a transformation on the ``Sequential``
    applies to each of the modules it stores (which are each a registered
    submodule of the ``Sequential``).

    What's the difference between a ``Sequential`` and a
    :class:`torch.nn.ModuleList`? A ``ModuleList`` is exactly what it sounds
    like--a list for storing ``Module`` s! On the other hand, the layers in a
    ``Sequential`` are connected in a cascading way.

    Example::

        # Using Sequential to create a small model. When `model` is run,
        # input will first be passed to `Conv2d(1,20,5)`. The output of
        # `Conv2d(1,20,5)` will be used as the input to the first
        # `ReLU`; the output of the first `ReLU` will become the input
        # for `Conv2d(20,64,5)`. Finally, the output of
        # `Conv2d(20,64,5)` will be used as input to the second `ReLU`
        model = nn.Sequential(
                  nn.Conv2d(1,20,5),
                  nn.ReLU(),
                  nn.Conv2d(20,64,5),
                  nn.ReLU()
                )

        # Using Sequential with OrderedDict. This is functionally the
        # same as the above code
        model = nn.Sequential(OrderedDict([
                  ('conv1', nn.Conv2d(1,20,5)),
                  ('relu1', nn.ReLU()),
                  ('conv2', nn.Conv2d(20,64,5)),
                  ('relu2', nn.ReLU())
                ]))
    """

    # Narrowed from Module's declaration: every stored value is a Module.
    _modules: Dict[str, Module]  # type: ignore[assignment]

    @overload
    def __init__(self, *args: Module) -> None:
        ...

    @overload
    def __init__(self, arg: 'OrderedDict[str, Module]') -> None:
        ...

    def __init__(self, *args):
        super(Sequential, self).__init__()
        if len(args) == 1 and isinstance(args[0], OrderedDict):
            # A single OrderedDict argument supplies both names and modules.
            for key, module in args[0].items():
                self.add_module(key, module)
        else:
            # Positional modules are registered under their string index.
            for idx, module in enumerate(args):
                self.add_module(str(idx), module)

    def _get_item_by_idx(self, iterator, idx) -> T:
        """Get the idx-th item of the iterator.

        Supports negative indices; raises IndexError when out of range.
        """
        size = len(self)
        idx = operator.index(idx)
        if not -size <= idx < size:
            raise IndexError('index {} is out of range'.format(idx))
        # Normalize negatives, then advance the iterator to that position.
        idx %= size
        return next(islice(iterator, idx, None))

    @_copy_to_script_wrapper
    def __getitem__(self, idx) -> Union['Sequential', T]:
        # A slice returns a new container of the same class holding the
        # selected (name, module) pairs; an int returns the module itself.
        if isinstance(idx, slice):
            return self.__class__(OrderedDict(list(self._modules.items())[idx]))
        else:
            return self._get_item_by_idx(self._modules.values(), idx)

    def __setitem__(self, idx: int, module: Module) -> None:
        key: str = self._get_item_by_idx(self._modules.keys(), idx)
        return setattr(self, key, module)

    def __delitem__(self, idx: Union[slice, int]) -> None:
        if isinstance(idx, slice):
            for key in list(self._modules.keys())[idx]:
                delattr(self, key)
        else:
            key = self._get_item_by_idx(self._modules.keys(), idx)
            delattr(self, key)

    @_copy_to_script_wrapper
    def __len__(self) -> int:
        return len(self._modules)

    @_copy_to_script_wrapper
    def __dir__(self):
        # Hide the purely numeric submodule names from dir().
        keys = super(Sequential, self).__dir__()
        keys = [key for key in keys if not key.isdigit()]
        return keys

    @_copy_to_script_wrapper
    def __iter__(self) -> Iterator[Module]:
        return iter(self._modules.values())

    # NB: We can't really type check this function as the type of input
    # may change dynamically (as is tested in
    # TestScript.test_sequential_intermediary_types). Cannot annotate
    # with Any as TorchScript expects a more precise type
    def forward(self, input):
        for module in self:
            input = module(input)
        return input
class ModuleList(Module):
    r"""Holds submodules in a list.

    :class:`~torch.nn.ModuleList` can be indexed like a regular Python list,
    but the modules it contains are properly registered, and will be visible
    by all :class:`~torch.nn.Module` methods.

    Args:
        modules (iterable, optional): an iterable of modules to add

    Example::

        class MyModule(nn.Module):
            def __init__(self):
                super(MyModule, self).__init__()
                self.linears = nn.ModuleList([nn.Linear(10, 10) for i in range(10)])

            def forward(self, x):
                # ModuleList can act as an iterable, or be indexed using ints
                for i, l in enumerate(self.linears):
                    x = self.linears[i // 2](x) + l(x)
                return x
    """

    # Narrowed from Module's declaration: every stored value is a Module.
    _modules: Dict[str, Module]  # type: ignore[assignment]

    def __init__(self, modules: Optional[Iterable[Module]] = None) -> None:
        super(ModuleList, self).__init__()
        if modules is not None:
            self += modules

    def _get_abs_string_index(self, idx):
        """Normalize ``idx`` to the non-negative string key used internally."""
        idx = operator.index(idx)
        count = len(self)
        if not -count <= idx < count:
            raise IndexError('index {} is out of range'.format(idx))
        return str(idx + count if idx < 0 else idx)

    @_copy_to_script_wrapper
    def __getitem__(self, idx: int) -> Module:
        # A slice yields a fresh container of the same class.
        if isinstance(idx, slice):
            return self.__class__(list(self._modules.values())[idx])
        return self._modules[self._get_abs_string_index(idx)]

    def __setitem__(self, idx: int, module: Module) -> None:
        key = self._get_abs_string_index(idx)
        return setattr(self, key, module)

    def __delitem__(self, idx: Union[int, slice]) -> None:
        if isinstance(idx, slice):
            for position in range(len(self._modules))[idx]:
                delattr(self, str(position))
        else:
            delattr(self, self._get_abs_string_index(idx))
        # Renumber the survivors so the keys stay a dense '0'..'n-1' range.
        self._modules = OrderedDict(
            (str(new_idx), mod)
            for new_idx, mod in enumerate(self._modules.values()))

    @_copy_to_script_wrapper
    def __len__(self) -> int:
        return len(self._modules)

    @_copy_to_script_wrapper
    def __iter__(self) -> Iterator[Module]:
        return iter(self._modules.values())

    def __iadd__(self, modules: Iterable[Module]) -> 'ModuleList':
        return self.extend(modules)

    @_copy_to_script_wrapper
    def __dir__(self):
        # Numeric submodule names are noise in dir(); drop them.
        return [name for name in super(ModuleList, self).__dir__()
                if not name.isdigit()]

    def insert(self, index: int, module: Module) -> None:
        r"""Insert a given module before a given index in the list.

        Args:
            index (int): index to insert.
            module (nn.Module): module to insert
        """
        # Shift every entry at or after `index` one slot to the right,
        # walking backwards so nothing is overwritten prematurely.
        for dest in range(len(self._modules), index, -1):
            self._modules[str(dest)] = self._modules[str(dest - 1)]
        self._modules[str(index)] = module

    def append(self, module: Module) -> 'ModuleList':
        r"""Appends a given module to the end of the list.

        Args:
            module (nn.Module): module to append
        """
        self.add_module(str(len(self)), module)
        return self

    def extend(self, modules: Iterable[Module]) -> 'ModuleList':
        r"""Appends modules from a Python iterable to the end of the list.

        Args:
            modules (iterable): iterable of modules to append
        """
        if not isinstance(modules, container_abcs.Iterable):
            raise TypeError("ModuleList.extend should be called with an "
                            "iterable, but got " + type(modules).__name__)
        start = len(self)
        for shift, module in enumerate(modules):
            self.add_module(str(start + shift), module)
        return self
# remove forward altogether to fall back on Module's _forward_unimplemented
class ModuleDict(Module):
    r"""Holds submodules in a dictionary.

    :class:`~torch.nn.ModuleDict` can be indexed like a regular Python
    dictionary, but modules it contains are properly registered, and will be
    visible by all :class:`~torch.nn.Module` methods.

    :class:`~torch.nn.ModuleDict` is an **ordered** dictionary that respects

    * the order of insertion, and

    * in :meth:`~torch.nn.ModuleDict.update`, the order of the merged
      ``OrderedDict``, ``dict`` (started from Python 3.6) or another
      :class:`~torch.nn.ModuleDict` (the argument to
      :meth:`~torch.nn.ModuleDict.update`).

    Note that :meth:`~torch.nn.ModuleDict.update` with other unordered mapping
    types (e.g., Python's plain ``dict`` before Python version 3.6) does not
    preserve the order of the merged mapping.

    Args:
        modules (iterable, optional): a mapping (dictionary) of (string: module)
            or an iterable of key-value pairs of type (string, module)

    Example::

        class MyModule(nn.Module):
            def __init__(self):
                super(MyModule, self).__init__()
                self.choices = nn.ModuleDict({
                        'conv': nn.Conv2d(10, 10, 3),
                        'pool': nn.MaxPool2d(3)
                })
                self.activations = nn.ModuleDict([
                        ['lrelu', nn.LeakyReLU()],
                        ['prelu', nn.PReLU()]
                ])

            def forward(self, x, choice, act):
                x = self.choices[choice](x)
                x = self.activations[act](x)
                return x
    """

    # Narrowed from Module's declaration: every stored value is a Module.
    _modules: Dict[str, Module]  # type: ignore[assignment]

    def __init__(self, modules: Optional[Mapping[str, Module]] = None) -> None:
        super(ModuleDict, self).__init__()
        if modules is not None:
            self.update(modules)

    @_copy_to_script_wrapper
    def __getitem__(self, key: str) -> Module:
        return self._modules[key]

    def __setitem__(self, key: str, module: Module) -> None:
        self.add_module(key, module)

    def __delitem__(self, key: str) -> None:
        del self._modules[key]

    @_copy_to_script_wrapper
    def __len__(self) -> int:
        return len(self._modules)

    @_copy_to_script_wrapper
    def __iter__(self) -> Iterator[str]:
        # Iterates over keys, matching dict semantics.
        return iter(self._modules)

    @_copy_to_script_wrapper
    def __contains__(self, key: str) -> bool:
        return key in self._modules

    def clear(self) -> None:
        """Remove all items from the ModuleDict.
        """
        self._modules.clear()

    def pop(self, key: str) -> Module:
        r"""Remove key from the ModuleDict and return its module.

        Args:
            key (string): key to pop from the ModuleDict
        """
        v = self[key]
        del self[key]
        return v

    @_copy_to_script_wrapper
    def keys(self) -> Iterable[str]:
        r"""Return an iterable of the ModuleDict keys.
        """
        return self._modules.keys()

    @_copy_to_script_wrapper
    def items(self) -> Iterable[Tuple[str, Module]]:
        r"""Return an iterable of the ModuleDict key/value pairs.
        """
        return self._modules.items()

    @_copy_to_script_wrapper
    def values(self) -> Iterable[Module]:
        r"""Return an iterable of the ModuleDict values.
        """
        return self._modules.values()

    def update(self, modules: Mapping[str, Module]) -> None:
        r"""Update the :class:`~torch.nn.ModuleDict` with the key-value pairs from a
        mapping or an iterable, overwriting existing keys.

        .. note::
            If :attr:`modules` is an ``OrderedDict``, a :class:`~torch.nn.ModuleDict`,
            or an iterable of key-value pairs, the order of new elements in it is
            preserved.

        Args:
            modules (iterable): a mapping (dictionary) from string to
                :class:`~torch.nn.Module`, or an iterable of key-value pairs
                of type (string, :class:`~torch.nn.Module`)

        Raises:
            TypeError: if :attr:`modules` is not iterable, or an element of an
                iterable argument is not itself an iterable pair.
            ValueError: if an element of an iterable argument does not have
                exactly two items.
        """
        if not isinstance(modules, container_abcs.Iterable):
            raise TypeError("ModuleDict.update should be called with an "
                            "iterable of key/value pairs, but got " +
                            type(modules).__name__)

        if isinstance(modules, (OrderedDict, ModuleDict, container_abcs.Mapping)):
            for key, module in modules.items():
                self[key] = module
        else:
            # modules here can be a list of (name, module) pairs
            for j, m in enumerate(modules):
                if not isinstance(m, container_abcs.Iterable):
                    # Fixed message spacing: used to render as e.g. "isint".
                    raise TypeError("ModuleDict update sequence element "
                                    "#" + str(j) + " should be Iterable; is " +
                                    type(m).__name__)
                if not len(m) == 2:
                    raise ValueError("ModuleDict update sequence element "
                                     "#" + str(j) + " has length " + str(len(m)) +
                                     "; 2 is required")
                # modules can be Mapping (what it's typed at), or a list:
                # [(name1, module1), (name2, module2)] — too cumbersome to type
                # correctly with overloads, so we add an ignore here
                self[m[0]] = m[1]  # type: ignore[assignment]
# remove forward altogether to fall back on Module's _forward_unimplemented
class ParameterList(Module):
    r"""Holds parameters in a list.

    :class:`~torch.nn.ParameterList` can be indexed like a regular Python
    list, but parameters it contains are properly registered, and will be
    visible by all :class:`~torch.nn.Module` methods.

    Args:
        parameters (iterable, optional): an iterable of
            :class:`~torch.nn.Parameter` to add

    Example::

        class MyModule(nn.Module):
            def __init__(self):
                super(MyModule, self).__init__()
                self.params = nn.ParameterList([nn.Parameter(torch.randn(10, 10)) for i in range(10)])

            def forward(self, x):
                # ParameterList can act as an iterable, or be indexed using ints
                for i, p in enumerate(self.params):
                    x = self.params[i // 2].mm(x) + p.mm(x)
                return x
    """

    # Narrowed from Module's declaration: every stored value is a Parameter.
    _parameters: Dict[str, 'Parameter']  # type: ignore[assignment]

    def __init__(self, parameters: Optional[Iterable['Parameter']] = None) -> None:
        super(ParameterList, self).__init__()
        # Flag consulted by __setattr__ to warn on non-Parameter attributes.
        self._initialized = True
        if parameters is not None:
            self += parameters

    def __setstate__(self, state):
        # Drop the guard while unpickling so attribute restoration is silent.
        state['_initialized'] = False
        super(ParameterList, self).__setstate__(state)
        self._initialized = True

    def _get_abs_string_index(self, idx):
        """Normalize ``idx`` to the non-negative string key used internally."""
        idx = operator.index(idx)
        count = len(self)
        if not -count <= idx < count:
            raise IndexError('index {} is out of range'.format(idx))
        return str(idx + count if idx < 0 else idx)

    @overload
    def __getitem__(self, idx: int) -> 'Parameter':
        ...

    @overload
    def __getitem__(self: 'T', idx: slice) -> 'T':
        ...

    def __getitem__(self, idx):
        # A slice yields a fresh container of the same class.
        if isinstance(idx, slice):
            return self.__class__(list(self._parameters.values())[idx])
        return self._parameters[self._get_abs_string_index(idx)]

    def __setitem__(self, idx: int, param: 'Parameter') -> None:
        key = self._get_abs_string_index(idx)
        return self.register_parameter(key, param)

    def __setattr__(self, key: Any, value: Any) -> None:
        # After construction, warn when a plain (non-Parameter) attribute is
        # introduced: ParameterList is meant to hold parameters only.
        if getattr(self, "_initialized", False):
            if not hasattr(self, key) and not isinstance(value, torch.nn.Parameter):
                warnings.warn("Setting attributes on ParameterList is not supported.")
        super(ParameterList, self).__setattr__(key, value)

    def __len__(self) -> int:
        return len(self._parameters)

    def __iter__(self) -> Iterator['Parameter']:
        return iter(self._parameters.values())

    def __iadd__(self, parameters: Iterable['Parameter']) -> 'ParameterList':
        return self.extend(parameters)

    def __dir__(self):
        # Numeric parameter names are noise in dir(); drop them.
        return [name for name in super(ParameterList, self).__dir__()
                if not name.isdigit()]

    def append(self, parameter: 'Parameter') -> 'ParameterList':
        """Appends a given parameter at the end of the list.

        Args:
            parameter (nn.Parameter): parameter to append
        """
        self.register_parameter(str(len(self)), parameter)
        return self

    def extend(self, parameters: Iterable['Parameter']) -> 'ParameterList':
        """Appends parameters from a Python iterable to the end of the list.

        Args:
            parameters (iterable): iterable of parameters to append
        """
        if not isinstance(parameters, container_abcs.Iterable):
            raise TypeError("ParameterList.extend should be called with an "
                            "iterable, but got " + type(parameters).__name__)
        start = len(self)
        for shift, param in enumerate(parameters):
            self.register_parameter(str(start + shift), param)
        return self

    def extra_repr(self) -> str:
        # One summary line per parameter: type, size, and device when on GPU.
        lines = []
        for k, p in self._parameters.items():
            size_str = 'x'.join(str(size) for size in p.size())
            device_str = '' if not p.is_cuda else ' (GPU {})'.format(p.get_device())
            parastr = 'Parameter containing: [{} of size {}{}]'.format(
                torch.typename(p), size_str, device_str)
            lines.append(' (' + str(k) + '): ' + parastr)
        return '\n'.join(lines)

    def __call__(self, input):
        raise RuntimeError('ParameterList should not be called.')

    def _replicate_for_data_parallel(self):
        warnings.warn("nn.ParameterList is being used with DataParallel but this is not "
                      "supported. This list will appear empty for the models replicated "
                      "on each GPU except the original one.")
        return super(ParameterList, self)._replicate_for_data_parallel()
class ParameterDict(Module):
    r"""Holds parameters in a dictionary.

    ParameterDict can be indexed like a regular Python dictionary, but
    parameters it contains are properly registered, and will be visible by
    all Module methods.

    :class:`~torch.nn.ParameterDict` is an **ordered** dictionary that respects

    * the order of insertion, and

    * in :meth:`~torch.nn.ParameterDict.update`, the order of the merged
      ``OrderedDict`` or another :class:`~torch.nn.ParameterDict` (the argument to
      :meth:`~torch.nn.ParameterDict.update`).

    Note that :meth:`~torch.nn.ParameterDict.update` with other unordered mapping
    types (e.g., Python's plain ``dict``) does not preserve the order of the
    merged mapping.

    Args:
        parameters (iterable, optional): a mapping (dictionary) of
            (string : :class:`~torch.nn.Parameter`) or an iterable of key-value
            pairs of type (string, :class:`~torch.nn.Parameter`)

    Example::

        class MyModule(nn.Module):
            def __init__(self):
                super(MyModule, self).__init__()
                self.params = nn.ParameterDict({
                        'left': nn.Parameter(torch.randn(5, 10)),
                        'right': nn.Parameter(torch.randn(5, 10))
                })

            def forward(self, x, choice):
                x = self.params[choice].mm(x)
                return x
    """

    # Narrowed from Module's declaration: every stored value is a Parameter.
    _parameters: Dict[str, 'Parameter']  # type: ignore[assignment]

    def __init__(self, parameters: Optional[Mapping[str, 'Parameter']] = None) -> None:
        super(ParameterDict, self).__init__()
        # Flag consulted by __setattr__ to warn on non-Parameter attributes.
        self._initialized = True
        if parameters is not None:
            self.update(parameters)

    def __setstate__(self, state):
        # Drop the guard while unpickling so attribute restoration is silent.
        state['_initialized'] = False
        super(ParameterDict, self).__setstate__(state)
        self._initialized = True

    def __getitem__(self, key: str) -> 'Parameter':
        return self._parameters[key]

    def __setitem__(self, key: str, parameter: 'Parameter') -> None:
        self.register_parameter(key, parameter)

    def __delitem__(self, key: str) -> None:
        del self._parameters[key]

    def __setattr__(self, key: Any, value: Any) -> None:
        # After construction, warn when a plain (non-Parameter) attribute is
        # introduced: ParameterDict is meant to hold parameters only.
        if getattr(self, "_initialized", False):
            if not hasattr(self, key) and not isinstance(value, torch.nn.Parameter):
                warnings.warn("Setting attributes on ParameterDict is not supported.")
        super(ParameterDict, self).__setattr__(key, value)

    def __len__(self) -> int:
        return len(self._parameters)

    def __iter__(self) -> Iterator[str]:
        # Iterates over keys, matching dict semantics.
        return iter(self._parameters.keys())

    def __contains__(self, key: str) -> bool:
        return key in self._parameters

    def clear(self) -> None:
        """Remove all items from the ParameterDict.
        """
        self._parameters.clear()

    def pop(self, key: str) -> 'Parameter':
        r"""Remove key from the ParameterDict and return its parameter.

        Args:
            key (string): key to pop from the ParameterDict
        """
        v = self[key]
        del self[key]
        return v

    def keys(self) -> Iterable[str]:
        r"""Return an iterable of the ParameterDict keys.
        """
        return self._parameters.keys()

    def items(self) -> Iterable[Tuple[str, 'Parameter']]:
        r"""Return an iterable of the ParameterDict key/value pairs.
        """
        return self._parameters.items()

    def values(self) -> Iterable['Parameter']:
        r"""Return an iterable of the ParameterDict values.
        """
        return self._parameters.values()

    def update(self, parameters: Mapping[str, 'Parameter']) -> None:
        r"""Update the :class:`~torch.nn.ParameterDict` with the key-value pairs from a
        mapping or an iterable, overwriting existing keys.

        .. note::
            If :attr:`parameters` is an ``OrderedDict``, a :class:`~torch.nn.ParameterDict`,
            or an iterable of key-value pairs, the order of new elements in it is preserved.

        Args:
            parameters (iterable): a mapping (dictionary) from string to
                :class:`~torch.nn.Parameter`, or an iterable of
                key-value pairs of type (string, :class:`~torch.nn.Parameter`)

        Raises:
            TypeError: if :attr:`parameters` is not iterable, or an element of
                an iterable argument is not itself an iterable pair.
            ValueError: if an element of an iterable argument does not have
                exactly two items.
        """
        if not isinstance(parameters, container_abcs.Iterable):
            # Fixed class name in the message: it used to say "ParametersDict".
            raise TypeError("ParameterDict.update should be called with an "
                            "iterable of key/value pairs, but got " +
                            type(parameters).__name__)

        if isinstance(parameters, (OrderedDict, ParameterDict)):
            for key, parameter in parameters.items():
                self[key] = parameter
        elif isinstance(parameters, container_abcs.Mapping):
            # Plain (possibly unordered) mappings are merged in sorted key
            # order so the resulting registration order is deterministic.
            for key, parameter in sorted(parameters.items()):
                self[key] = parameter
        else:
            for j, p in enumerate(parameters):
                if not isinstance(p, container_abcs.Iterable):
                    # Fixed message spacing: used to render as e.g. "isint".
                    raise TypeError("ParameterDict update sequence element "
                                    "#" + str(j) + " should be Iterable; is " +
                                    type(p).__name__)
                if not len(p) == 2:
                    raise ValueError("ParameterDict update sequence element "
                                     "#" + str(j) + " has length " + str(len(p)) +
                                     "; 2 is required")
                # parameters as length-2 list too cumbersome to type, see
                # ModuleDict.update comment
                self[p[0]] = p[1]  # type: ignore[assignment]

    def extra_repr(self) -> str:
        # One summary line per parameter: type, size, and device when on GPU.
        child_lines = []
        for k, p in self._parameters.items():
            size_str = 'x'.join(str(size) for size in p.size())
            device_str = '' if not p.is_cuda else ' (GPU {})'.format(p.get_device())
            parastr = 'Parameter containing: [{} of size {}{}]'.format(
                torch.typename(p), size_str, device_str)
            child_lines.append(' (' + k + '): ' + parastr)
        return '\n'.join(child_lines)

    def __call__(self, input):
        raise RuntimeError('ParameterDict should not be called.')

    def _replicate_for_data_parallel(self):
        warnings.warn("nn.ParameterDict is being used with DataParallel but this is not "
                      "supported. This dict will appear empty for the models replicated "
                      "on each GPU except the original one.")
        return super(ParameterDict, self)._replicate_for_data_parallel()
# NOTE(review): the lines below are website footer text captured when this file
# was scraped from the rendered PyTorch documentation; they are not part of the
# module. Kept as comments so the file remains parseable.
# Docs
# Access comprehensive developer documentation for PyTorch
# To analyze traffic and optimize your experience, we serve cookies on this
# site. By clicking or navigating, you agree to allow our usage of cookies.
# As the current maintainers of this site, Facebook's Cookies Policy applies.
# Learn more, including about available controls: Cookies Policy.