# mypy: allow-untyped-decorators
# mypy: allow-untyped-defs
import operator
from collections import abc as container_abcs, OrderedDict
from collections.abc import Iterable, Iterator, Mapping
from itertools import chain, islice
from typing import Any, Optional, overload, TypeVar, Union
from typing_extensions import deprecated, Self

import torch
from torch._jit_internal import _copy_to_script_wrapper
from torch.nn.parameter import Parameter

from .module import Module


__all__ = [
    "Container",
    "Sequential",
    "ModuleList",
    "ModuleDict",
    "ParameterList",
    "ParameterDict",
]

T = TypeVar("T", bound=Module)


# Copied from torch.nn.modules.module, required for a custom __repr__ for ModuleList
def _addindent(s_, numSpaces):
    """Indent every line of ``s_`` except the first by ``numSpaces`` spaces.

    Args:
        s_ (str): multi-line string to indent.
        numSpaces (int): number of spaces to prepend to each continuation line.

    Returns:
        str: the re-indented string (``s_`` unchanged if it has no newline).
    """
    s = s_.split("\n")
    # don't do anything for single-line stuff
    if len(s) == 1:
        return s_
    first = s.pop(0)
    s = [(numSpaces * " ") + line for line in s]
    s = "\n".join(s)
    # The first line keeps its original indentation.
    s = first + "\n" + s
    return s


@deprecated(
    "`nn.Container` is deprecated. "
    "All of its functionality is now implemented in `nn.Module`. Subclass that instead.",
    category=FutureWarning,
)
class Container(Module):
    """Deprecated alias kept for backward compatibility.

    Registers every keyword argument as a submodule; use :class:`~torch.nn.Module`
    directly instead.
    """

    def __init__(self, **kwargs: Any) -> None:
        super().__init__()
        for key, value in kwargs.items():
            self.add_module(key, value)
class Sequential(Module):
    r"""A sequential container.

    Modules will be added to it in the order they are passed in the
    constructor. Alternatively, an ``OrderedDict`` of modules can be
    passed in. The ``forward()`` method of ``Sequential`` accepts any
    input and forwards it to the first module it contains. It then
    "chains" outputs to inputs sequentially for each subsequent module,
    finally returning the output of the last module.

    The value a ``Sequential`` provides over manually calling a sequence
    of modules is that it allows treating the whole container as a
    single module, such that performing a transformation on the
    ``Sequential`` applies to each of the modules it stores (which are
    each a registered submodule of the ``Sequential``).

    What's the difference between a ``Sequential`` and a
    :class:`torch.nn.ModuleList`? A ``ModuleList`` is exactly what it
    sounds like--a list for storing ``Module`` s! On the other hand,
    the layers in a ``Sequential`` are connected in a cascading way.

    Example::

        # Using Sequential to create a small model. When `model` is run,
        # input will first be passed to `Conv2d(1,20,5)`. The output of
        # `Conv2d(1,20,5)` will be used as the input to the first
        # `ReLU`; the output of the first `ReLU` will become the input
        # for `Conv2d(20,64,5)`. Finally, the output of
        # `Conv2d(20,64,5)` will be used as input to the second `ReLU`
        model = nn.Sequential(
                  nn.Conv2d(1,20,5),
                  nn.ReLU(),
                  nn.Conv2d(20,64,5),
                  nn.ReLU()
                )

        # Using Sequential with OrderedDict. This is functionally the
        # same as the above code
        model = nn.Sequential(OrderedDict([
                  ('conv1', nn.Conv2d(1,20,5)),
                  ('relu1', nn.ReLU()),
                  ('conv2', nn.Conv2d(20,64,5)),
                  ('relu2', nn.ReLU())
                ]))
    """

    _modules: dict[str, Module]  # type: ignore[assignment]

    @overload
    def __init__(self, *args: Module) -> None: ...

    @overload
    def __init__(self, arg: "OrderedDict[str, Module]") -> None: ...

    def __init__(self, *args):
        super().__init__()
        # A single OrderedDict argument supplies explicit names; otherwise
        # positional modules are registered under their string index.
        if len(args) == 1 and isinstance(args[0], OrderedDict):
            for key, module in args[0].items():
                self.add_module(key, module)
        else:
            for idx, module in enumerate(args):
                self.add_module(str(idx), module)

    def _get_item_by_idx(self, iterator, idx) -> T:  # type: ignore[misc, type-var]
        """Get the idx-th item of the iterator."""
        size = len(self)
        idx = operator.index(idx)
        if not -size <= idx < size:
            raise IndexError(f"index {idx} is out of range")
        # Normalize negative indices into [0, size).
        idx %= size
        return next(islice(iterator, idx, None))

    @_copy_to_script_wrapper
    def __getitem__(self, idx: Union[slice, int]) -> Union["Sequential", T]:
        if isinstance(idx, slice):
            # Slicing builds a new container of the same class from the
            # selected (name, module) pairs.
            return self.__class__(OrderedDict(list(self._modules.items())[idx]))
        else:
            return self._get_item_by_idx(self._modules.values(), idx)

    def __setitem__(self, idx: int, module: Module) -> None:
        key: str = self._get_item_by_idx(self._modules.keys(), idx)
        return setattr(self, key, module)

    def __delitem__(self, idx: Union[slice, int]) -> None:
        if isinstance(idx, slice):
            for key in list(self._modules.keys())[idx]:
                delattr(self, key)
        else:
            key = self._get_item_by_idx(self._modules.keys(), idx)
            delattr(self, key)
        # To preserve numbering: rebuild the dict so remaining modules are
        # keyed "0".."len-1" consecutively again.
        str_indices = [str(i) for i in range(len(self._modules))]
        self._modules = OrderedDict(list(zip(str_indices, self._modules.values())))

    @_copy_to_script_wrapper
    def __len__(self) -> int:
        return len(self._modules)

    def __add__(self, other) -> "Sequential":
        if isinstance(other, Sequential):
            ret = Sequential()
            for layer in self:
                ret.append(layer)
            for layer in other:
                ret.append(layer)
            return ret
        else:
            raise ValueError(
                "add operator supports only objects "
                f"of Sequential class, but {str(type(other))} is given."
            )

    def pop(self, key: Union[int, slice]) -> Module:
        """Remove the module(s) at ``key`` and return them."""
        v = self[key]
        del self[key]
        return v

    def __iadd__(self, other) -> Self:
        if isinstance(other, Sequential):
            offset = len(self)
            for i, module in enumerate(other):
                self.add_module(str(i + offset), module)
            return self
        else:
            raise ValueError(
                "add operator supports only objects "
                f"of Sequential class, but {str(type(other))} is given."
            )

    def __mul__(self, other: int) -> "Sequential":
        if not isinstance(other, int):
            raise TypeError(
                f"unsupported operand type(s) for *: {type(self)} and {type(other)}"
            )
        elif other <= 0:
            raise ValueError(
                f"Non-positive multiplication factor {other} for {type(self)}"
            )
        else:
            # NOTE: modules are re-registered, not copied — the repeated
            # entries share the same Module instances.
            combined = Sequential()
            offset = 0
            for _ in range(other):
                for module in self:
                    combined.add_module(str(offset), module)
                    offset += 1
            return combined

    def __rmul__(self, other: int) -> "Sequential":
        return self.__mul__(other)

    def __imul__(self, other: int) -> Self:
        if not isinstance(other, int):
            raise TypeError(
                f"unsupported operand type(s) for *: {type(self)} and {type(other)}"
            )
        elif other <= 0:
            raise ValueError(
                f"Non-positive multiplication factor {other} for {type(self)}"
            )
        else:
            len_original = len(self)
            offset = len(self)
            # Append (other - 1) more copies of the original entries in place.
            for _ in range(other - 1):
                for i in range(len_original):
                    self.add_module(str(i + offset), self._modules[str(i)])
                offset += len_original
            return self

    @_copy_to_script_wrapper
    def __dir__(self):
        # Hide the numeric submodule attributes ("0", "1", ...) from dir().
        keys = super().__dir__()
        keys = [key for key in keys if not key.isdigit()]
        return keys

    @_copy_to_script_wrapper
    def __iter__(self) -> Iterator[Module]:
        return iter(self._modules.values())

    # NB: We can't really type check this function as the type of input
    # may change dynamically (as is tested in
    # TestScript.test_sequential_intermediary_types). Cannot annotate
    # with Any as TorchScript expects a more precise type
    def forward(self, input):
        for module in self:
            input = module(input)
        return input

    def append(self, module: Module) -> "Sequential":
        r"""Append a given module to the end.

        Args:
            module (nn.Module): module to append
        """
        self.add_module(str(len(self)), module)
        return self

    def insert(self, index: int, module: Module) -> "Sequential":
        """Insert ``module`` before ``index``, shifting later entries up by one."""
        if not isinstance(module, Module):
            raise AssertionError(f"module should be of type: {Module}")
        n = len(self._modules)
        if not (-n <= index <= n):
            raise IndexError(f"Index out of range: {index}")
        if index < 0:
            index += n
        # Shift entries [index, n) up by one slot, working backwards.
        for i in range(n, index, -1):
            self._modules[str(i)] = self._modules[str(i - 1)]
        self._modules[str(index)] = module
        return self

    def extend(self, sequential) -> "Sequential":
        """Append each layer of ``sequential`` to the end of this container."""
        for layer in sequential:
            self.append(layer)
        return self
class ModuleList(Module):
    r"""Holds submodules in a list.

    :class:`~torch.nn.ModuleList` can be indexed like a regular Python list, but
    modules it contains are properly registered, and will be visible by all
    :class:`~torch.nn.Module` methods.

    Args:
        modules (iterable, optional): an iterable of modules to add

    Example::

        class MyModule(nn.Module):
            def __init__(self) -> None:
                super().__init__()
                self.linears = nn.ModuleList([nn.Linear(10, 10) for i in range(10)])

            def forward(self, x):
                # ModuleList can act as an iterable, or be indexed using ints
                for i, l in enumerate(self.linears):
                    x = self.linears[i // 2](x) + l(x)
                return x
    """

    _modules: dict[str, Module]  # type: ignore[assignment]

    def __init__(self, modules: Optional[Iterable[Module]] = None) -> None:
        super().__init__()
        if modules is not None:
            self += modules

    def _get_abs_string_index(self, idx):
        """Get the absolute index for the list of modules."""
        idx = operator.index(idx)
        if not (-len(self) <= idx < len(self)):
            raise IndexError(f"index {idx} is out of range")
        if idx < 0:
            idx += len(self)
        return str(idx)

    @overload
    def __getitem__(self, idx: slice) -> "ModuleList": ...

    @overload
    def __getitem__(self, idx: int) -> Module: ...

    @_copy_to_script_wrapper
    def __getitem__(self, idx: Union[int, slice]) -> Union[Module, "ModuleList"]:
        if isinstance(idx, slice):
            return self.__class__(list(self._modules.values())[idx])
        else:
            return self._modules[self._get_abs_string_index(idx)]

    def __setitem__(self, idx: int, module: Module) -> None:
        idx = self._get_abs_string_index(idx)
        return setattr(self, str(idx), module)

    def __delitem__(self, idx: Union[int, slice]) -> None:
        if isinstance(idx, slice):
            for k in range(len(self._modules))[idx]:
                delattr(self, str(k))
        else:
            delattr(self, self._get_abs_string_index(idx))
        # To preserve numbering, self._modules is being reconstructed with modules after deletion
        str_indices = [str(i) for i in range(len(self._modules))]
        self._modules = OrderedDict(list(zip(str_indices, self._modules.values())))

    @_copy_to_script_wrapper
    def __len__(self) -> int:
        return len(self._modules)

    @_copy_to_script_wrapper
    def __iter__(self) -> Iterator[Module]:
        return iter(self._modules.values())

    def __iadd__(self, modules: Iterable[Module]) -> Self:
        return self.extend(modules)

    def __add__(self, other: Iterable[Module]) -> "ModuleList":
        combined = ModuleList()
        for i, module in enumerate(chain(self, other)):
            combined.add_module(str(i), module)
        return combined

    def __repr__(self):
        """Return a custom repr for ModuleList that compresses repeated module representations."""
        list_of_reprs = [repr(item) for item in self]
        if len(list_of_reprs) == 0:
            return self._get_name() + "()"

        # Group consecutive identical reprs into [start, end] index runs so
        # that N equal modules render as "(a-b): N x <repr>".
        start_end_indices = [[0, 0]]
        repeated_blocks = [list_of_reprs[0]]
        for i, r in enumerate(list_of_reprs[1:], 1):
            if r == repeated_blocks[-1]:
                start_end_indices[-1][1] += 1
                continue

            start_end_indices.append([i, i])
            repeated_blocks.append(r)

        lines = []
        main_str = self._get_name() + "("
        for (start_id, end_id), b in zip(start_end_indices, repeated_blocks):
            local_repr = f"({start_id}): {b}"  # default repr

            if start_id != end_id:
                n = end_id - start_id + 1
                local_repr = f"({start_id}-{end_id}): {n} x {b}"

            local_repr = _addindent(local_repr, 2)
            lines.append(local_repr)

        main_str += "\n  " + "\n  ".join(lines) + "\n"
        main_str += ")"
        return main_str

    @_copy_to_script_wrapper
    def __dir__(self):
        # Hide the numeric submodule attributes ("0", "1", ...) from dir().
        keys = super().__dir__()
        keys = [key for key in keys if not key.isdigit()]
        return keys

    def insert(self, index: int, module: Module) -> None:
        r"""Insert a given module before a given index in the list.

        Args:
            index (int): index to insert.
            module (nn.Module): module to insert
        """
        # Shift entries [index, len) up by one slot, working backwards.
        # NOTE(review): unlike Sequential.insert, no bounds check or negative-index
        # normalization is performed here.
        for i in range(len(self._modules), index, -1):
            self._modules[str(i)] = self._modules[str(i - 1)]
        self._modules[str(index)] = module

    def append(self, module: Module) -> "ModuleList":
        r"""Append a given module to the end of the list.

        Args:
            module (nn.Module): module to append
        """
        self.add_module(str(len(self)), module)
        return self

    def extend(self, modules: Iterable[Module]) -> Self:
        r"""Append modules from a Python iterable to the end of the list.

        Args:
            modules (iterable): iterable of modules to append
        """
        if not isinstance(modules, container_abcs.Iterable):
            raise TypeError(
                "ModuleList.extend should be called with an "
                "iterable, but got " + type(modules).__name__
            )
        offset = len(self)
        for i, module in enumerate(modules):
            self.add_module(str(offset + i), module)
        return self
    # remove forward altogether to fall back on Module's _forward_unimplemented
class ModuleDict(Module):
    r"""Holds submodules in a dictionary.

    :class:`~torch.nn.ModuleDict` can be indexed like a regular Python dictionary,
    but modules it contains are properly registered, and will be visible by all
    :class:`~torch.nn.Module` methods.

    :class:`~torch.nn.ModuleDict` is an **ordered** dictionary that respects

    * the order of insertion, and

    * in :meth:`~torch.nn.ModuleDict.update`, the order of the merged
      ``OrderedDict``, ``dict`` (started from Python 3.6) or another
      :class:`~torch.nn.ModuleDict` (the argument to
      :meth:`~torch.nn.ModuleDict.update`).

    Note that :meth:`~torch.nn.ModuleDict.update` with other unordered mapping
    types (e.g., Python's plain ``dict`` before Python version 3.6) does not
    preserve the order of the merged mapping.

    Args:
        modules (iterable, optional): a mapping (dictionary) of (string: module)
            or an iterable of key-value pairs of type (string, module)

    Example::

        class MyModule(nn.Module):
            def __init__(self) -> None:
                super().__init__()
                self.choices = nn.ModuleDict({
                        'conv': nn.Conv2d(10, 10, 3),
                        'pool': nn.MaxPool2d(3)
                })
                self.activations = nn.ModuleDict([
                        ['lrelu', nn.LeakyReLU()],
                        ['prelu', nn.PReLU()]
                ])

            def forward(self, x, choice, act):
                x = self.choices[choice](x)
                x = self.activations[act](x)
                return x
    """

    _modules: dict[str, Module]  # type: ignore[assignment]

    def __init__(self, modules: Optional[Mapping[str, Module]] = None) -> None:
        super().__init__()
        if modules is not None:
            self.update(modules)

    @_copy_to_script_wrapper
    def __getitem__(self, key: str) -> Module:
        return self._modules[key]

    def __setitem__(self, key: str, module: Module) -> None:
        self.add_module(key, module)

    def __delitem__(self, key: str) -> None:
        del self._modules[key]

    @_copy_to_script_wrapper
    def __len__(self) -> int:
        return len(self._modules)

    @_copy_to_script_wrapper
    def __iter__(self) -> Iterator[str]:
        return iter(self._modules)

    @_copy_to_script_wrapper
    def __contains__(self, key: str) -> bool:
        return key in self._modules

    def clear(self) -> None:
        """Remove all items from the ModuleDict."""
        self._modules.clear()

    def pop(self, key: str) -> Module:
        r"""Remove key from the ModuleDict and return its module.

        Args:
            key (str): key to pop from the ModuleDict
        """
        v = self[key]
        del self[key]
        return v

    @_copy_to_script_wrapper
    def keys(self) -> Iterable[str]:
        r"""Return an iterable of the ModuleDict keys."""
        return self._modules.keys()

    @_copy_to_script_wrapper
    def items(self) -> Iterable[tuple[str, Module]]:
        r"""Return an iterable of the ModuleDict key/value pairs."""
        return self._modules.items()

    @_copy_to_script_wrapper
    def values(self) -> Iterable[Module]:
        r"""Return an iterable of the ModuleDict values."""
        return self._modules.values()

    def update(self, modules: Mapping[str, Module]) -> None:
        r"""Update the :class:`~torch.nn.ModuleDict` with key-value pairs from a mapping, overwriting existing keys.

        .. note::
            If :attr:`modules` is an ``OrderedDict``, a :class:`~torch.nn.ModuleDict`, or
            an iterable of key-value pairs, the order of new elements in it is preserved.

        Args:
            modules (iterable): a mapping (dictionary) from string to :class:`~torch.nn.Module`,
                or an iterable of key-value pairs of type (string, :class:`~torch.nn.Module`)
        """
        if not isinstance(modules, container_abcs.Iterable):
            raise TypeError(
                "ModuleDict.update should be called with an "
                "iterable of key/value pairs, but got " + type(modules).__name__
            )

        if isinstance(modules, (OrderedDict, ModuleDict, container_abcs.Mapping)):
            for key, module in modules.items():
                self[key] = module
        else:
            # modules here can be a list with two items
            for j, m in enumerate(modules):
                if not isinstance(m, container_abcs.Iterable):
                    raise TypeError(
                        "ModuleDict update sequence element "
                        "#" + str(j) + " should be Iterable; is" + type(m).__name__
                    )
                if not len(m) == 2:
                    raise ValueError(
                        "ModuleDict update sequence element "
                        "#" + str(j) + " has length " + str(len(m)) + "; 2 is required"
                    )
                # modules can be Mapping (what it's typed at), or a list: [(name1, module1), (name2, module2)]
                # that's too cumbersome to type correctly with overloads, so we add an ignore here
                self[m[0]] = m[1]  # type: ignore[assignment]
    # remove forward altogether to fall back on Module's _forward_unimplemented
class ParameterList(Module):
    r"""Holds parameters in a list.

    :class:`~torch.nn.ParameterList` can be used like a regular Python
    list, but Tensors that are :class:`~torch.nn.Parameter` are properly registered,
    and will be visible by all :class:`~torch.nn.Module` methods.

    Note that the constructor, assigning an element of the list, the
    :meth:`~torch.nn.ParameterList.append` method and the
    :meth:`~torch.nn.ParameterList.extend` method will convert any
    :class:`~torch.Tensor` into :class:`~torch.nn.Parameter`.

    Args:
        parameters (iterable, optional): an iterable of elements to add to the list.

    Example::

        class MyModule(nn.Module):
            def __init__(self) -> None:
                super().__init__()
                self.params = nn.ParameterList([nn.Parameter(torch.randn(10, 10)) for i in range(10)])

            def forward(self, x):
                # ParameterList can act as an iterable, or be indexed using ints
                for i, p in enumerate(self.params):
                    x = self.params[i // 2].mm(x) + p.mm(x)
                return x
    """

    def __init__(self, values: Optional[Iterable[Any]] = None) -> None:
        super().__init__()
        # Number of list entries; entries are stored as attributes named "0", "1", ...
        self._size = 0
        if values is not None:
            self += values

    def _get_abs_string_index(self, idx):
        """Get the absolute index for the list of modules."""
        idx = operator.index(idx)
        if not (-len(self) <= idx < len(self)):
            raise IndexError(f"index {idx} is out of range")
        if idx < 0:
            idx += len(self)
        return str(idx)

    @overload
    def __getitem__(self, idx: int) -> Any: ...

    @overload
    def __getitem__(self: T, idx: slice) -> T: ...

    def __getitem__(self, idx):
        if isinstance(idx, slice):
            # Build a new container of the same class holding the selected entries.
            start, stop, step = idx.indices(len(self))
            out = self.__class__()
            for i in range(start, stop, step):
                out.append(self[i])
            return out
        else:
            idx = self._get_abs_string_index(idx)
            return getattr(self, str(idx))

    def __setitem__(self, idx: int, param: Any) -> None:
        # Note that all other function that add an entry to the list part of
        # the ParameterList end up here. So this is the only place where we need
        # to wrap things into Parameter if needed.
        # Objects added via setattr() are not in the list part and thus won't
        # call into this function.
        idx = self._get_abs_string_index(idx)
        if isinstance(param, torch.Tensor) and not isinstance(param, Parameter):
            param = Parameter(param)
        return setattr(self, str(idx), param)

    def __len__(self) -> int:
        return self._size

    def __iter__(self) -> Iterator[Any]:
        return iter(self[i] for i in range(len(self)))

    def __iadd__(self, parameters: Iterable[Any]) -> Self:
        return self.extend(parameters)

    def __dir__(self):
        # Hide the numeric attributes ("0", "1", ...) from dir().
        keys = super().__dir__()
        keys = [key for key in keys if not key.isdigit()]
        return keys

    def append(self, value: Any) -> "ParameterList":
        """Append a given value at the end of the list.

        Args:
            value (Any): value to append
        """
        new_idx = len(self)
        self._size += 1
        # __setitem__ handles Tensor -> Parameter wrapping.
        self[new_idx] = value
        return self

    def extend(self, values: Iterable[Any]) -> Self:
        """Append values from a Python iterable to the end of the list.

        Args:
            values (iterable): iterable of values to append
        """
        # Tensor is an iterable but we never want to unpack it here
        if not isinstance(values, container_abcs.Iterable) or isinstance(
            values, torch.Tensor
        ):
            raise TypeError(
                "ParameterList.extend should be called with an "
                "iterable, but got " + type(values).__name__
            )
        for value in values:
            self.append(value)
        return self

    def extra_repr(self) -> str:
        """Summarize each entry for repr: dtype/size/device for tensors, type name otherwise."""
        child_lines = []
        for k, p in enumerate(self):
            if isinstance(p, torch.Tensor):
                size_str = "x".join(str(size) for size in p.size())
                if p.device.type in ["cuda", torch._C._get_privateuse1_backend_name()]:
                    device_str = f" ({p.device})"
                else:
                    device_str = ""
                parastr = "{} containing: [{} of size {}{}]".format(
                    "Parameter" if isinstance(p, Parameter) else "Tensor",
                    p.dtype,
                    size_str,
                    device_str,
                )
                child_lines.append("  (" + str(k) + "): " + parastr)
            else:
                child_lines.append(
                    "  (" + str(k) + "): Object of type: " + type(p).__name__
                )
        tmpstr = "\n".join(child_lines)
        return tmpstr

    def __call__(self, *args, **kwargs):
        raise RuntimeError("ParameterList should not be called.")
class ParameterDict(Module):
    r"""Holds parameters in a dictionary.

    ParameterDict can be indexed like a regular Python dictionary, but Parameters it
    contains are properly registered, and will be visible by all Module methods.
    Other objects are treated as would be done by a regular Python dictionary

    :class:`~torch.nn.ParameterDict` is an **ordered** dictionary.
    :meth:`~torch.nn.ParameterDict.update` with other unordered mapping
    types (e.g., Python's plain ``dict``) does not preserve the order of the
    merged mapping. On the other hand, ``OrderedDict`` or another :class:`~torch.nn.ParameterDict`
    will preserve their ordering.

    Note that the constructor, assigning an element of the dictionary and the
    :meth:`~torch.nn.ParameterDict.update` method will convert any :class:`~torch.Tensor` into
    :class:`~torch.nn.Parameter`.

    Args:
        values (iterable, optional): a mapping (dictionary) of
            (string : Any) or an iterable of key-value pairs
            of type (string, Any)

    Example::

        class MyModule(nn.Module):
            def __init__(self) -> None:
                super().__init__()
                self.params = nn.ParameterDict({
                        'left': nn.Parameter(torch.randn(5, 10)),
                        'right': nn.Parameter(torch.randn(5, 10))
                })

            def forward(self, x, choice):
                x = self.params[choice].mm(x)
                return x
    """

    def __init__(self, parameters: Any = None) -> None:
        super().__init__()
        # Ordered set of keys (dict values are always None); the actual
        # entries live as attributes on the module.
        self._keys: dict[str, None] = {}
        if parameters is not None:
            self.update(parameters)

    def _key_to_attr(self, key: str) -> str:
        """Validate that ``key`` is a string and return the attribute name to use for it."""
        if not isinstance(key, str):
            raise TypeError(
                "Index given to ParameterDict cannot be used as a key as it is "
                f"not a string (type is '{type(key).__name__}'). Open an issue on "
                "github if you need non-string keys."
            )
        else:
            # Use the key as-is so that `.named_parameters()` returns the right thing
            return key

    def __getitem__(self, key: str) -> Any:
        attr = self._key_to_attr(key)
        return getattr(self, attr)

    def __setitem__(self, key: str, value: Any) -> None:
        # Note that all other function that add an entry to the dictionary part of
        # the ParameterDict end up here. So this is the only place where we need
        # to wrap things into Parameter if needed.
        # Objects added via setattr() are not in the dictionary part and thus won't
        # call into this function.
        self._keys[key] = None
        attr = self._key_to_attr(key)
        if isinstance(value, torch.Tensor) and not isinstance(value, Parameter):
            value = Parameter(value)
        setattr(self, attr, value)

    def __delitem__(self, key: str) -> None:
        del self._keys[key]
        attr = self._key_to_attr(key)
        delattr(self, attr)

    def __len__(self) -> int:
        return len(self._keys)

    def __iter__(self) -> Iterator[str]:
        return iter(self._keys)

    def __reversed__(self) -> Iterator[str]:
        return reversed(list(self._keys))

    def copy(self) -> "ParameterDict":
        """Return a copy of this :class:`~torch.nn.ParameterDict` instance."""
        # We have to use an OrderedDict because the ParameterDict constructor
        # behaves differently on plain dict vs OrderedDict
        return ParameterDict(OrderedDict((k, self[k]) for k in self._keys))

    def setdefault(self, key: str, default: Optional[Any] = None) -> Any:
        """Set the default for a key in the Parameterdict.

        If key is in the ParameterDict, return its value.
        If not, insert `key` with a parameter `default` and return `default`.
        `default` defaults to `None`.

        Args:
            key (str): key to set default for
            default (Any): the parameter set to the key
        """
        if key not in self:
            self[key] = default
        return self[key]

    def clear(self) -> None:
        """Remove all items from the ParameterDict."""
        # Iterate over a copy since deletion mutates self._keys.
        for k in self._keys.copy():
            del self[k]

    def pop(self, key: str) -> Any:
        r"""Remove key from the ParameterDict and return its parameter.

        Args:
            key (str): key to pop from the ParameterDict
        """
        v = self[key]
        del self[key]
        return v

    def popitem(self) -> tuple[str, Any]:
        """Remove and return the last inserted `(key, parameter)` pair from the ParameterDict."""
        k, _ = self._keys.popitem()
        # We need the key in the _keys to be able to access/del
        self._keys[k] = None
        val = self[k]
        del self[k]
        return k, val

    def get(self, key: str, default: Optional[Any] = None) -> Any:
        r"""Return the parameter associated with key if present. Otherwise return default if provided, None if not.

        Args:
            key (str): key to get from the ParameterDict
            default (Parameter, optional): value to return if key not present
        """
        return self[key] if key in self else default

    def fromkeys(
        self, keys: Iterable[str], default: Optional[Any] = None
    ) -> "ParameterDict":
        r"""Return a new ParameterDict with the keys provided.

        Args:
            keys (iterable, string): keys to make the new ParameterDict from
            default (Parameter, optional): value to set for all keys
        """
        return ParameterDict((k, default) for k in keys)

    def keys(self) -> Iterable[str]:
        r"""Return an iterable of the ParameterDict keys."""
        return self._keys.keys()

    def items(self) -> Iterable[tuple[str, Any]]:
        r"""Return an iterable of the ParameterDict key/value pairs."""
        return ((k, self[k]) for k in self._keys)

    def values(self) -> Iterable[Any]:
        r"""Return an iterable of the ParameterDict values."""
        return (self[k] for k in self._keys)

    def update(self, parameters: Union[Mapping[str, Any], "ParameterDict"]) -> None:
        r"""Update the :class:`~torch.nn.ParameterDict` with key-value pairs from ``parameters``, overwriting existing keys.

        .. note::
            If :attr:`parameters` is an ``OrderedDict``, a :class:`~torch.nn.ParameterDict`, or
            an iterable of key-value pairs, the order of new elements in it is preserved.

        Args:
            parameters (iterable): a mapping (dictionary) from string to
                :class:`~torch.nn.Parameter`, or an iterable of
                key-value pairs of type (string, :class:`~torch.nn.Parameter`)
        """
        if not isinstance(parameters, container_abcs.Iterable):
            raise TypeError(
                "ParametersDict.update should be called with an "
                "iterable of key/value pairs, but got " + type(parameters).__name__
            )

        if isinstance(parameters, (OrderedDict, ParameterDict)):
            for key, parameter in parameters.items():
                self[key] = parameter
        elif isinstance(parameters, container_abcs.Mapping):
            # Plain mappings are treated as unordered: keys are sorted.
            for key, parameter in sorted(parameters.items()):
                self[key] = parameter
        else:
            for j, p in enumerate(parameters):
                if not isinstance(p, container_abcs.Iterable):
                    raise TypeError(
                        "ParameterDict update sequence element "
                        "#" + str(j) + " should be Iterable; is" + type(p).__name__
                    )
                if not len(p) == 2:
                    raise ValueError(
                        "ParameterDict update sequence element "
                        "#" + str(j) + " has length " + str(len(p)) + "; 2 is required"
                    )
                # parameters as length-2 list too cumbersome to type, see ModuleDict.update comment
                self[p[0]] = p[1]  # type: ignore[assignment]

    def extra_repr(self) -> str:
        """Summarize each entry for repr: type/size/device for tensors, type name otherwise."""
        child_lines = []
        for k, p in self.items():
            if isinstance(p, torch.Tensor):
                size_str = "x".join(str(size) for size in p.size())
                if p.device.type in ["cuda", torch._C._get_privateuse1_backend_name()]:
                    device_str = f" ({p.device})"
                else:
                    device_str = ""
                parastr = "{} containing: [{} of size {}{}]".format(
                    "Parameter" if isinstance(p, Parameter) else "Tensor",
                    torch.typename(p),
                    size_str,
                    device_str,
                )
                child_lines.append("  (" + str(k) + "): " + parastr)
            else:
                child_lines.append(
                    "  (" + str(k) + "): Object of type: " + type(p).__name__
                )
        tmpstr = "\n".join(child_lines)
        return tmpstr

    def __call__(self, input):
        raise RuntimeError("ParameterDict should not be called.")

    def __or__(self, other: "ParameterDict") -> "ParameterDict":
        copy = self.copy()
        copy.update(other)
        return copy

    def __ror__(self, other: "ParameterDict") -> "ParameterDict":
        copy = other.copy()
        copy.update(self)
        return copy

    def __ior__(self, other: "ParameterDict") -> Self:
        self.update(other)
        return self
Docs
Access comprehensive developer documentation for PyTorch
To analyze traffic and optimize your experience, we serve cookies on this site. By clicking or navigating, you agree to allow our usage of cookies. As the current maintainers of this site, Facebook’s Cookies Policy applies. Learn more, including about available controls: Cookies Policy.