pytorch
62 lines · 1.9 KB
1from torch.distributions import constraints
2from torch.distributions.normal import Normal
3from torch.distributions.transformed_distribution import TransformedDistribution
4from torch.distributions.transforms import ExpTransform
5
6__all__ = ["LogNormal"]
7
8
class LogNormal(TransformedDistribution):
    r"""
    A log-normal distribution over the positive reals.

    If ``X ~ Normal(loc, scale)`` then ``Y = exp(X)`` follows
    ``LogNormal(loc, scale)``; this class realizes that construction by
    pushing a :class:`Normal` base distribution through an
    :class:`ExpTransform`.

    Example::

        >>> # xdoctest: +IGNORE_WANT("non-deterministic")
        >>> m = LogNormal(torch.tensor([0.0]), torch.tensor([1.0]))
        >>> m.sample()  # log-normal distributed with mean=0 and stddev=1
        tensor([ 0.1046])

    Args:
        loc (float or Tensor): mean of log of distribution
        scale (float or Tensor): standard deviation of log of the distribution
    """

    arg_constraints = {"loc": constraints.real, "scale": constraints.positive}
    support = constraints.positive
    has_rsample = True

    def __init__(self, loc, scale, validate_args=None):
        # The underlying Gaussian lives in log-space; exp() maps it to (0, inf).
        gaussian = Normal(loc, scale, validate_args=validate_args)
        super().__init__(gaussian, ExpTransform(), validate_args=validate_args)

    def expand(self, batch_shape, _instance=None):
        instance = self._get_checked_instance(LogNormal, _instance)
        return super().expand(batch_shape, _instance=instance)

    @property
    def loc(self):
        # Mean of the underlying normal, i.e. of log(Y).
        return self.base_dist.loc

    @property
    def scale(self):
        # Standard deviation of the underlying normal, i.e. of log(Y).
        return self.base_dist.scale

    @property
    def mean(self):
        # E[Y] = exp(mu + sigma^2 / 2)
        return (self.loc + 0.5 * self.scale.square()).exp()

    @property
    def mode(self):
        # argmax of the density: exp(mu - sigma^2)
        return (self.loc - self.scale.pow(2)).exp()

    @property
    def variance(self):
        # Var[Y] = (exp(sigma^2) - 1) * exp(2*mu + sigma^2)
        log_var = self.scale.square()
        return log_var.expm1() * (2 * self.loc + log_var).exp()

    def entropy(self):
        # H[Y] = H[X] + E[log |d exp(x)/dx|] = H[Normal(mu, sigma)] + mu
        return self.base_dist.entropy() + self.loc
63