import torch
from torch.autograd import Function
from torch.autograd.function import once_differentiable
from torch.distributions import constraints
from torch.distributions.exp_family import ExponentialFamily

__all__ = ["Dirichlet"]


# This helper is exposed for testing.
def _Dirichlet_backward(x, concentration, grad_output):
    total = concentration.sum(-1, True).expand_as(concentration)
    grad = torch._dirichlet_grad(x, concentration, total)
    return grad * (grad_output - (x * grad_output).sum(-1, True))


class _Dirichlet(Function):
    @staticmethod
    def forward(ctx, concentration):
        x = torch._sample_dirichlet(concentration)
        ctx.save_for_backward(x, concentration)
        return x

    @staticmethod
    @once_differentiable
    def backward(ctx, grad_output):
        x, concentration = ctx.saved_tensors
        return _Dirichlet_backward(x, concentration, grad_output)
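
# --- Illustrative sketch (not part of the upstream module) -------------------
# Shows how the custom autograd Function above is exercised: `apply` draws a
# sample on the probability simplex, and calling `backward` on a scalar loss
# routes the gradient to `concentration` through `_Dirichlet_backward`.
# Guarded so it only runs when this file is executed directly.
if __name__ == "__main__":
    concentration = torch.tensor([0.5, 1.0, 2.0], requires_grad=True)
    x = _Dirichlet.apply(concentration)       # sample; sums to 1 along the last dim
    loss = (x * torch.arange(3.0)).sum()      # arbitrary scalar function of the sample
    loss.backward()                           # triggers _Dirichlet_backward
    print(x, concentration.grad)
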
class Dirichlet(ExponentialFamily):
    r"""
    Creates a Dirichlet distribution parameterized by concentration :attr:`concentration`.

    Example::

        >>> # xdoctest: +IGNORE_WANT("non-deterministic")
        >>> m = Dirichlet(torch.tensor([0.5, 0.5]))
        >>> m.sample()  # Dirichlet distributed with concentration [0.5, 0.5]
        tensor([ 0.1046,  0.8954])

    Args:
        concentration (Tensor): concentration parameter of the distribution
            (often referred to as alpha)
    """

    arg_constraints = {
        "concentration": constraints.independent(constraints.positive, 1)
    }
    support = constraints.simplex
    has_rsample = True

    def __init__(self, concentration, validate_args=None):
        if concentration.dim() < 1:
            raise ValueError(
                "`concentration` parameter must be at least one-dimensional."
            )
        self.concentration = concentration
        batch_shape, event_shape = concentration.shape[:-1], concentration.shape[-1:]
        super().__init__(batch_shape, event_shape, validate_args=validate_args)
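
# --- Usage sketch (illustrative; not part of the upstream module) ------------
# The listing above is truncated after `__init__`; the sketch below assumes the
# rest of the class as defined in the full torch source (e.g. `rsample`, which
# routes through `_Dirichlet.apply`, and `log_prob`). Because `has_rsample` is
# True, gradients of the log density flow back to the concentration tensor.
if __name__ == "__main__":
    alpha = torch.tensor([0.5, 0.5], requires_grad=True)
    m = Dirichlet(alpha)
    s = m.rsample()          # reparameterized draw on the simplex
    logp = m.log_prob(s)     # log density of the draw
    logp.backward()          # gradients accumulate in alpha.grad
    print(s, alpha.grad)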