from .module import Module
from .utils import _pair, _quadruple, _ntuple
from .. import functional as F
from torch import Tensor
from ..common_types import _size_2_t, _size_4_t, _size_6_t
from typing import Sequence, Tuple

# TODO: grad_output size asserts in THNN

__all__ = [
    'CircularPad1d', 'CircularPad2d', 'CircularPad3d',
    'ConstantPad1d', 'ConstantPad2d', 'ConstantPad3d',
    'ReflectionPad1d', 'ReflectionPad2d', 'ReflectionPad3d',
    'ReplicationPad1d', 'ReplicationPad2d', 'ReplicationPad3d',
    'ZeroPad1d', 'ZeroPad2d', 'ZeroPad3d',
]


class _CircularPadNd(Module):
    """Shared base for the N-dimensional circular padding modules.

    Subclasses store the per-side pad widths in ``self.padding`` and
    override :meth:`_check_input_dim` to validate the rank of the input;
    the actual padding is delegated to :func:`torch.nn.functional.pad`
    with mode ``'circular'``.
    """

    __constants__ = ['padding']
    padding: Sequence[int]

    def _check_input_dim(self, input):
        # Each concrete subclass must validate the input rank it supports.
        raise NotImplementedError

    def forward(self, input: Tensor) -> Tensor:
        self._check_input_dim(input)
        return F.pad(input, self.padding, 'circular')

    def extra_repr(self) -> str:
        return f'{self.padding}'


class CircularPad1d(_CircularPadNd):
    r"""Pads the input tensor using circular padding of the input boundary.

    Tensor values at the beginning of the dimension are used to pad the end,
    and values at the end are used to pad the beginning. If negative padding
    is applied then the ends of the tensor get removed.

    For `N`-dimensional padding, use :func:`torch.nn.functional.pad()`.

    Args:
        padding (int, tuple): the size of the padding. If is `int`, uses the
            same padding in all boundaries. If a 2-`tuple`, uses
            (:math:`\text{padding\_left}`, :math:`\text{padding\_right}`)

    Shape:
        - Input: :math:`(C, W_{in})` or :math:`(N, C, W_{in})`.
        - Output: :math:`(C, W_{out})` or :math:`(N, C, W_{out})`, where

          :math:`W_{out} = W_{in} + \text{padding\_left} + \text{padding\_right}`

    Examples::

        >>> # xdoctest: +IGNORE_WANT("not sure why xdoctest is choking on this")
        >>> m = nn.CircularPad1d(2)
        >>> input = torch.arange(8, dtype=torch.float).reshape(1, 2, 4)
        >>> input
        tensor([[[0., 1., 2., 3.],
                 [4., 5., 6., 7.]]])
        >>> m(input)
        tensor([[[2., 3., 0., 1., 2., 3., 0., 1.],
                 [6., 7., 4., 5., 6., 7., 4., 5.]]])
        >>> # using different paddings for different sides
        >>> m = nn.CircularPad1d((3, 1))
        >>> m(input)
        tensor([[[1., 2., 3., 0., 1., 2., 3., 0.],
                 [5., 6., 7., 4., 5., 6., 7., 4.]]])
    """

    padding: Tuple[int, int]

    def __init__(self, padding: _size_2_t) -> None:
        super().__init__()
        # _pair normalizes an int into a symmetric (left, right) tuple.
        self.padding = _pair(padding)

    def _check_input_dim(self, input):
        if input.dim() != 2 and input.dim() != 3:
            raise ValueError(f"expected 2D or 3D input (got {input.dim()}D input)")


class CircularPad2d(_CircularPadNd):
    r"""Pads the input tensor using circular padding of the input boundary.

    Tensor values at the beginning of the dimension are used to pad the end,
    and values at the end are used to pad the beginning. If negative padding
    is applied then the ends of the tensor get removed.

    For `N`-dimensional padding, use :func:`torch.nn.functional.pad()`.

    Args:
        padding (int, tuple): the size of the padding. If is `int`, uses the
            same padding in all boundaries. If a 4-`tuple`, uses
            (:math:`\text{padding\_left}`, :math:`\text{padding\_right}`,
            :math:`\text{padding\_top}`, :math:`\text{padding\_bottom}`)

    Shape:
        - Input: :math:`(N, C, H_{in}, W_{in})` or :math:`(C, H_{in}, W_{in})`.
        - Output: :math:`(N, C, H_{out}, W_{out})` or :math:`(C, H_{out}, W_{out})`, where

          :math:`H_{out} = H_{in} + \text{padding\_top} + \text{padding\_bottom}`

          :math:`W_{out} = W_{in} + \text{padding\_left} + \text{padding\_right}`

    Examples::

        >>> m = nn.CircularPad2d(2)
        >>> input = torch.arange(9, dtype=torch.float).reshape(1, 1, 3, 3)
        >>> input
        tensor([[[[0., 1., 2.],
                  [3., 4., 5.],
                  [6., 7., 8.]]]])
        >>> m(input)
        tensor([[[[4., 5., 3., 4., 5., 3., 4.],
                  [7., 8., 6., 7., 8., 6., 7.],
                  [1., 2., 0., 1., 2., 0., 1.],
                  [4., 5., 3., 4., 5., 3., 4.],
                  [7., 8., 6., 7., 8., 6., 7.],
                  [1., 2., 0., 1., 2., 0., 1.],
                  [4., 5., 3., 4., 5., 3., 4.]]]])
        >>> # using different paddings for different sides
        >>> m = nn.CircularPad2d((1, 1, 2, 0))
        >>> m(input)
        tensor([[[[5., 3., 4., 5., 3.],
                  [8., 6., 7., 8., 6.],
                  [2., 0., 1., 2., 0.],
                  [5., 3., 4., 5., 3.],
                  [8., 6., 7., 8., 6.]]]])
    """

    padding: Tuple[int, int, int, int]

    def __init__(self, padding: _size_4_t) -> None:
        super().__init__()
        # _quadruple normalizes an int into (left, right, top, bottom).
        self.padding = _quadruple(padding)

    def _check_input_dim(self, input):
        if input.dim() != 3 and input.dim() != 4:
            raise ValueError(f"expected 3D or 4D input (got {input.dim()}D input)")


class CircularPad3d(_CircularPadNd):
    r"""Pads the input tensor using circular padding of the input boundary.

    Tensor values at the beginning of the dimension are used to pad the end,
    and values at the end are used to pad the beginning. If negative padding
    is applied then the ends of the tensor get removed.

    For `N`-dimensional padding, use :func:`torch.nn.functional.pad()`.

    Args:
        padding (int, tuple): the size of the padding. If is `int`, uses the
            same padding in all boundaries. If a 6-`tuple`, uses
            (:math:`\text{padding\_left}`, :math:`\text{padding\_right}`,
            :math:`\text{padding\_top}`, :math:`\text{padding\_bottom}`,
            :math:`\text{padding\_front}`, :math:`\text{padding\_back}`)

    Shape:
        - Input: :math:`(N, C, D_{in}, H_{in}, W_{in})` or :math:`(C, D_{in}, H_{in}, W_{in})`.
        - Output: :math:`(N, C, D_{out}, H_{out}, W_{out})` or
          :math:`(C, D_{out}, H_{out}, W_{out})`, where

          :math:`D_{out} = D_{in} + \text{padding\_front} + \text{padding\_back}`

          :math:`H_{out} = H_{in} + \text{padding\_top} + \text{padding\_bottom}`

          :math:`W_{out} = W_{in} + \text{padding\_left} + \text{padding\_right}`

    Examples::

        >>> # xdoctest: +IGNORE_WANT("non-deterministic")
        >>> m = nn.CircularPad3d(3)
        >>> input = torch.randn(16, 3, 8, 320, 480)
        >>> output = m(input)
        >>> # using different paddings for different sides
        >>> m = nn.CircularPad3d((3, 3, 6, 6, 1, 1))
        >>> output = m(input)
    """

    padding: Tuple[int, int, int, int, int, int]

    def __init__(self, padding: _size_6_t) -> None:
        super().__init__()
        # _ntuple(6) normalizes an int into (left, right, top, bottom, front, back).
        self.padding = _ntuple(6)(padding)

    def _check_input_dim(self, input):
        if input.dim() != 4 and input.dim() != 5:
            raise ValueError(f"expected 4D or 5D input (got {input.dim()}D input)")


class _ConstantPadNd(Module):
    """Shared base for the N-dimensional constant padding modules.

    Stores the fill ``value``; concrete subclasses set ``self.padding``.
    Padding is delegated to :func:`torch.nn.functional.pad` with mode
    ``'constant'``.
    """

    __constants__ = ['padding', 'value']
    value: float
    padding: Sequence[int]

    def __init__(self, value: float) -> None:
        super().__init__()
        self.value = value

    def forward(self, input: Tensor) -> Tensor:
        return F.pad(input, self.padding, 'constant', self.value)

    def extra_repr(self) -> str:
        return f'padding={self.padding}, value={self.value}'
class ConstantPad1d(_ConstantPadNd):
    r"""Pads the input tensor boundaries with a constant value.

    For `N`-dimensional padding, use :func:`torch.nn.functional.pad()`.

    Args:
        padding (int, tuple): the size of the padding. If is `int`, uses the
            same padding in both boundaries. If a 2-`tuple`, uses
            (:math:`\text{padding\_left}`, :math:`\text{padding\_right}`)

    Shape:
        - Input: :math:`(C, W_{in})` or :math:`(N, C, W_{in})`.
        - Output: :math:`(C, W_{out})` or :math:`(N, C, W_{out})`, where

          :math:`W_{out} = W_{in} + \text{padding\_left} + \text{padding\_right}`

    Examples::

        >>> # xdoctest: +IGNORE_WANT("non-deterministic")
        >>> m = nn.ConstantPad1d(2, 3.5)
        >>> input = torch.randn(1, 2, 4)
        >>> input
        tensor([[[-1.0491, -0.7152, -0.0749,  0.8530],
                 [-1.3287,  1.8966,  0.1466, -0.2771]]])
        >>> m(input)
        tensor([[[ 3.5000,  3.5000, -1.0491, -0.7152, -0.0749,  0.8530,  3.5000,
                   3.5000],
                 [ 3.5000,  3.5000, -1.3287,  1.8966,  0.1466, -0.2771,  3.5000,
                   3.5000]]])
        >>> m = nn.ConstantPad1d(2, 3.5)
        >>> input = torch.randn(1, 2, 3)
        >>> input
        tensor([[[ 1.6616,  1.4523, -1.1255],
                 [-3.6372,  0.1182, -1.8652]]])
        >>> m(input)
        tensor([[[ 3.5000,  3.5000,  1.6616,  1.4523, -1.1255,  3.5000,  3.5000],
                 [ 3.5000,  3.5000, -3.6372,  0.1182, -1.8652,  3.5000,  3.5000]]])
        >>> # using different paddings for different sides
        >>> m = nn.ConstantPad1d((3, 1), 3.5)
        >>> m(input)
        tensor([[[ 3.5000,  3.5000,  3.5000,  1.6616,  1.4523, -1.1255,  3.5000],
                 [ 3.5000,  3.5000,  3.5000, -3.6372,  0.1182, -1.8652,  3.5000]]])
    """

    padding: Tuple[int, int]

    def __init__(self, padding: _size_2_t, value: float):
        super().__init__(value)
        # _pair normalizes an int into a symmetric (left, right) tuple.
        self.padding = _pair(padding)
class ConstantPad2d(_ConstantPadNd):
    r"""Pads the input tensor boundaries with a constant value.

    For `N`-dimensional padding, use :func:`torch.nn.functional.pad()`.

    Args:
        padding (int, tuple): the size of the padding. If is `int`, uses the
            same padding in all boundaries. If a 4-`tuple`, uses
            (:math:`\text{padding\_left}`, :math:`\text{padding\_right}`,
            :math:`\text{padding\_top}`, :math:`\text{padding\_bottom}`)

    Shape:
        - Input: :math:`(N, C, H_{in}, W_{in})` or :math:`(C, H_{in}, W_{in})`.
        - Output: :math:`(N, C, H_{out}, W_{out})` or :math:`(C, H_{out}, W_{out})`, where

          :math:`H_{out} = H_{in} + \text{padding\_top} + \text{padding\_bottom}`

          :math:`W_{out} = W_{in} + \text{padding\_left} + \text{padding\_right}`

    Examples::

        >>> # xdoctest: +IGNORE_WANT("non-deterministic")
        >>> m = nn.ConstantPad2d(2, 3.5)
        >>> input = torch.randn(1, 2, 2)
        >>> input
        tensor([[[ 1.6585,  0.4320],
                 [-0.8701, -0.4649]]])
        >>> m(input)
        tensor([[[ 3.5000,  3.5000,  3.5000,  3.5000,  3.5000,  3.5000],
                 [ 3.5000,  3.5000,  3.5000,  3.5000,  3.5000,  3.5000],
                 [ 3.5000,  3.5000,  1.6585,  0.4320,  3.5000,  3.5000],
                 [ 3.5000,  3.5000, -0.8701, -0.4649,  3.5000,  3.5000],
                 [ 3.5000,  3.5000,  3.5000,  3.5000,  3.5000,  3.5000],
                 [ 3.5000,  3.5000,  3.5000,  3.5000,  3.5000,  3.5000]]])
        >>> # using different paddings for different sides
        >>> m = nn.ConstantPad2d((3, 0, 2, 1), 3.5)
        >>> m(input)
        tensor([[[ 3.5000,  3.5000,  3.5000,  3.5000,  3.5000],
                 [ 3.5000,  3.5000,  3.5000,  3.5000,  3.5000],
                 [ 3.5000,  3.5000,  3.5000,  1.6585,  0.4320],
                 [ 3.5000,  3.5000,  3.5000, -0.8701, -0.4649],
                 [ 3.5000,  3.5000,  3.5000,  3.5000,  3.5000]]])
    """

    # NOTE: unlike the original, the redundant ``__constants__`` redeclaration
    # is dropped — ``_ConstantPadNd`` already declares the identical
    # ``['padding', 'value']`` list, and neither ConstantPad1d nor
    # ConstantPad3d repeats it.
    padding: Tuple[int, int, int, int]

    def __init__(self, padding: _size_4_t, value: float) -> None:
        super().__init__(value)
        # _quadruple normalizes an int into (left, right, top, bottom).
        self.padding = _quadruple(padding)
class ConstantPad3d(_ConstantPadNd):
    r"""Pads the input tensor boundaries with a constant value.

    For `N`-dimensional padding, use :func:`torch.nn.functional.pad()`.

    Args:
        padding (int, tuple): the size of the padding. If is `int`, uses the
            same padding in all boundaries. If a 6-`tuple`, uses
            (:math:`\text{padding\_left}`, :math:`\text{padding\_right}`,
            :math:`\text{padding\_top}`, :math:`\text{padding\_bottom}`,
            :math:`\text{padding\_front}`, :math:`\text{padding\_back}`)

    Shape:
        - Input: :math:`(N, C, D_{in}, H_{in}, W_{in})` or :math:`(C, D_{in}, H_{in}, W_{in})`.
        - Output: :math:`(N, C, D_{out}, H_{out}, W_{out})` or
          :math:`(C, D_{out}, H_{out}, W_{out})`, where

          :math:`D_{out} = D_{in} + \text{padding\_front} + \text{padding\_back}`

          :math:`H_{out} = H_{in} + \text{padding\_top} + \text{padding\_bottom}`

          :math:`W_{out} = W_{in} + \text{padding\_left} + \text{padding\_right}`

    Examples::

        >>> m = nn.ConstantPad3d(3, 3.5)
        >>> input = torch.randn(16, 3, 10, 20, 30)
        >>> output = m(input)
        >>> # using different paddings for different sides
        >>> m = nn.ConstantPad3d((3, 3, 6, 6, 0, 1), 3.5)
        >>> output = m(input)
    """

    padding: Tuple[int, int, int, int, int, int]

    def __init__(self, padding: _size_6_t, value: float) -> None:
        super().__init__(value)
        # _ntuple(6) normalizes an int into (left, right, top, bottom, front, back).
        self.padding = _ntuple(6)(padding)
# NOTE(review): trailing cookie-policy boilerplate from the documentation
# website (a scraping artifact, not part of this module) was removed here.