pytorch
112 lines · 3.5 KB
1# mypy: allow-untyped-defs
2import math
3from numbers import Number, Real
4
5import torch
6from torch.distributions import constraints
7from torch.distributions.exp_family import ExponentialFamily
8from torch.distributions.utils import _standard_normal, broadcast_all
9from torch.types import _size
10
11
12__all__ = ["Normal"]
13
14
class Normal(ExponentialFamily):
    r"""
    Creates a normal (also called Gaussian) distribution parameterized by
    :attr:`loc` and :attr:`scale`.

    Example::

        >>> # xdoctest: +IGNORE_WANT("non-deterministic")
        >>> m = Normal(torch.tensor([0.0]), torch.tensor([1.0]))
        >>> m.sample()  # normally distributed with loc=0 and scale=1
        tensor([ 0.1046])

    Args:
        loc (float or Tensor): mean of the distribution (often referred to as mu)
        scale (float or Tensor): standard deviation of the distribution
            (often referred to as sigma)
    """

    arg_constraints = {"loc": constraints.real, "scale": constraints.positive}
    support = constraints.real
    has_rsample = True
    _mean_carrier_measure = 0

    def __init__(self, loc, scale, validate_args=None):
        self.loc, self.scale = broadcast_all(loc, scale)
        # Two plain numbers mean a scalar distribution (empty batch shape);
        # otherwise the broadcasted tensor shape is the batch shape.
        if isinstance(loc, Number) and isinstance(scale, Number):
            batch_shape = torch.Size()
        else:
            batch_shape = self.loc.size()
        super().__init__(batch_shape, validate_args=validate_args)

    @property
    def mean(self):
        # The mean of a Gaussian is its location parameter.
        return self.loc

    @property
    def mode(self):
        # The density peaks at the mean.
        return self.loc

    @property
    def stddev(self):
        return self.scale

    @property
    def variance(self):
        # Variance is the square of the standard deviation.
        return self.scale.pow(2)

    def expand(self, batch_shape, _instance=None):
        """Return a new Normal with parameters expanded to `batch_shape`."""
        new = self._get_checked_instance(Normal, _instance)
        shape = torch.Size(batch_shape)
        new.loc = self.loc.expand(shape)
        new.scale = self.scale.expand(shape)
        super(Normal, new).__init__(shape, validate_args=False)
        new._validate_args = self._validate_args
        return new

    def sample(self, sample_shape=torch.Size()):
        """Draw samples without tracking gradients."""
        shape = self._extended_shape(sample_shape)
        with torch.no_grad():
            return torch.normal(self.loc.expand(shape), self.scale.expand(shape))

    def rsample(self, sample_shape: _size = torch.Size()) -> torch.Tensor:
        """Reparameterized sampling: loc + scale * eps, eps ~ N(0, 1)."""
        shape = self._extended_shape(sample_shape)
        noise = _standard_normal(shape, dtype=self.loc.dtype, device=self.loc.device)
        return self.loc + noise * self.scale

    def log_prob(self, value):
        """Log of the Gaussian density evaluated at `value`."""
        if self._validate_args:
            self._validate_sample(value)
        var = self.scale**2
        if isinstance(self.scale, Real):
            log_scale = math.log(self.scale)
        else:
            log_scale = self.scale.log()
        # log N(value; loc, scale) = -(value-loc)^2 / (2 var) - log(scale) - log(sqrt(2 pi))
        squared_dev = (value - self.loc) ** 2
        return -squared_dev / (2 * var) - log_scale - math.log(math.sqrt(2 * math.pi))

    def cdf(self, value):
        """Cumulative distribution via the error function."""
        if self._validate_args:
            self._validate_sample(value)
        z = (value - self.loc) * self.scale.reciprocal() / math.sqrt(2)
        return 0.5 * (1 + torch.erf(z))

    def icdf(self, value):
        """Inverse CDF (quantile function) via the inverse error function."""
        return self.loc + self.scale * torch.erfinv(2 * value - 1) * math.sqrt(2)

    def entropy(self):
        """Differential entropy: 0.5 + 0.5*log(2*pi) + log(scale)."""
        return 0.5 + 0.5 * math.log(2 * math.pi) + torch.log(self.scale)

    @property
    def _natural_params(self):
        # Natural parameterization of the Gaussian exponential family.
        precision = self.scale.pow(2).reciprocal()
        return (self.loc * precision, -0.5 * precision)

    def _log_normalizer(self, x, y):
        # Log-partition function in natural parameters (x, y).
        return -0.25 * x.pow(2) / y + 0.5 * torch.log(-math.pi / y)
113