#include <torch/csrc/Size.h>

#include <c10/util/irange.h>
#include <pybind11/pytypes.h>
#include <torch/csrc/autograd/python_variable.h>
#include <torch/csrc/jit/frontend/tracer.h>
#include <torch/csrc/utils/object_ptr.h>
#include <torch/csrc/utils/pybind.h>
#include <torch/csrc/utils/python_arg_parser.h>
#include <torch/csrc/utils/python_numbers.h>
#include <torch/csrc/utils/python_strings.h>
#include <torch/csrc/utils/python_tuples.h>

#include <string>
// Builds a torch.Size (a tuple subclass) describing `var`'s shape.
//
// Outside of JIT tracing this is just the plain integer sizes. Under
// tracing, each element is instead a 0-dim tensor wrapped via
// THPVariable_Wrap(getSizeOf(...)) so that size accesses are recorded
// in the trace.
//
// Returns a new reference; throws python_error on allocation failure.
PyObject* THPSize_New(const torch::autograd::Variable& var) {
  if (!torch::jit::tracer::isTracing()) {
    auto sizes = var.sizes();
    return THPSize_NewFromSizes(var.dim(), sizes.data());
  }
  auto self = THPObjectPtr(THPSizeType.tp_alloc(&THPSizeType, var.dim()));
  if (!self)
    throw python_error();

  for (const auto i : c10::irange(var.dim())) {
    PyObject* py_size_tensor =
        THPVariable_Wrap(torch::jit::tracer::getSizeOf(var, i));
    if (!py_size_tensor)
      throw python_error();
    // PyTuple_SET_ITEM steals the reference returned by THPVariable_Wrap.
    PyTuple_SET_ITEM(self.get(), i, py_size_tensor);
  }

  return self.release();
}
// Builds a torch.Size from a raw array of `dim` integer sizes.
// Returns a new reference; throws python_error on allocation failure.
PyObject* THPSize_NewFromSizes(int64_t dim, const int64_t* sizes) {
  auto self = THPObjectPtr(THPSizeType.tp_alloc(&THPSizeType, dim));
  if (!self)
    throw python_error();
  THPUtils_packInt64Array(self, dim, sizes);
  return self.release();
}
// Builds a torch.Size from a tensor with (possibly) symbolic sizes.
//
// Each dimension is one of:
//  - a py::cast'ed SymInt object, when the size is truly symbolic
//    (rejected under JIT tracing, which can't record SymInts);
//  - a traced 0-dim size tensor, when tracing and the size is concrete;
//  - a plain Python int otherwise.
//
// Returns a new reference; throws python_error on allocation failure.
PyObject* THPSize_NewFromSymSizes(const at::Tensor& self_) {
  auto sym_sizes = self_.sym_sizes();

  auto ret = THPObjectPtr(THPSizeType.tp_alloc(
      &THPSizeType, static_cast<Py_ssize_t>(sym_sizes.size())));
  if (!ret)
    throw python_error();

  for (auto i : c10::irange(sym_sizes.size())) {
    auto si = sym_sizes[i];
    if (si.is_symbolic()) {
      // Truly symbolic: must surface the SymInt object itself, so it
      // cannot be represented in a JIT trace.
      TORCH_CHECK(
          !torch::jit::tracer::isTracing(),
          "JIT Tracing of SymInts isn't supported");
      auto py_symint = py::cast(si).release().ptr();
      if (!py_symint)
        throw python_error();
      // SET_ITEM steals the reference released from the py::object.
      PyTuple_SET_ITEM(ret.get(), i, py_symint);
    } else {
      // Concrete size: maybe_as_int() is guaranteed to have a value here.
      auto m = si.maybe_as_int();
      if (torch::jit::tracer::isTracing()) {
        PyObject* py_size_tensor = THPVariable_Wrap(
            torch::jit::tracer::getSizeOf(self_, static_cast<int64_t>(i)));
        if (!py_size_tensor)
          throw python_error();
        PyTuple_SET_ITEM(ret.get(), i, py_size_tensor);
      } else {
        PyTuple_SET_ITEM(ret.get(), i, THPUtils_packInt64(*m));
      }
    }
  }
  return ret.release();
}
// True iff `item` is a 0-dim Variable that has a value trace recorded,
// i.e. a scalar tensor produced while JIT tracing (accepted as a size).
static bool isTracedZeroDimVar(PyObject* item) {
  if (!THPVariable_Check(item))
    return false;
  auto& var = THPVariable_Unpack(item);
  return var.dim() == 0 && torch::jit::tracer::getValueTrace(var);
}
// tp_new for torch.Size: delegate to tuple.__new__, then validate that
// every element is int-like. Accepted as-is: Python ints, SymInts, and
// (while tracing) traced 0-dim variables. Anything else is coerced via
// __index__; failure raises TypeError.
static PyObject* THPSize_pynew(
    PyTypeObject* type,
    PyObject* args,
    PyObject* kwargs) {
  HANDLE_TH_ERRORS
  THPObjectPtr self(PyTuple_Type.tp_new(type, args, kwargs));
  if (self) {
    for (Py_ssize_t i = 0; i < PyTuple_Size(self); ++i) {
      PyObject* item = PyTuple_GET_ITEM(self.get(), i);
      if (THPUtils_checkLong(item)) {
        continue;
      }
      if (torch::is_symint(item)) {
        continue;
      }
      if (torch::jit::tracer::isTracing() && isTracedZeroDimVar(item)) {
        continue;
      }
      // item.__index__() works with 0-dim tensors and tensors with one
      // element; replace the item with its integer value.
      THPObjectPtr number(PyNumber_Index(item));
      if (number && THPUtils_checkLong(number.get())) {
        // PyTuple_SetItem steals a reference, so add one first.
        Py_INCREF(number.get());
        auto status = PyTuple_SetItem(self, i, number.get());
        if (status != 0) {
          throw python_error();
        }
        continue;
      }
      return PyErr_Format(
          PyExc_TypeError,
          "torch.Size() takes an iterable of 'int' (item %zd is '%s')",
          i,
          Py_TYPE(item)->tp_name);
    }
  }
  return self.release();
  END_HANDLE_TH_ERRORS
}
// tp_repr: renders "torch.Size([d0, d1, ...])". SymInt elements are
// rendered via their own str(); plain ints via std::to_string.
static PyObject* THPSize_repr(THPSize* self) {
  HANDLE_TH_ERRORS
  std::string repr("torch.Size([");
  for (Py_ssize_t i = 0; i < PyTuple_Size((PyObject*)self); ++i) {
    if (i != 0) {
      repr += ", ";
    }
    auto item = PyTuple_GET_ITEM(self, i);
    auto ih = py::handle(item);

    repr += torch::is_symint(ih)
        ? std::string(py::str(ih))
        : std::to_string(THPUtils_unpackLong(PyTuple_GET_ITEM(self, i)));
  }
  repr += "])";
  return THPUtils_packString(repr);
  END_HANDLE_TH_ERRORS
}
extern PyTypeObject THPSizeType;
154
template <typename FnType, FnType fn, typename... Args>
155
static PyObject* wrap_tuple_fn(Args... args) {
156
THPObjectPtr result((*fn)(std::forward<Args>(args)...));
159
if (PyTuple_Check(result.get())) {
160
return PyObject_CallFunctionObjArgs(
161
(PyObject*)&THPSizeType, result.get(), nullptr);
163
return result.release();
169
auto sq_concat = PyTuple_Type.tp_as_sequence->sq_concat;
170
auto sq_repeat = PyTuple_Type.tp_as_sequence->sq_repeat;
171
binaryfunc mp_subscript = PyTuple_Type.tp_as_mapping->mp_subscript;
174
// Sequence protocol: only concat and repeat are overridden so their
// results come back as torch.Size; everything else is inherited from
// tuple (nullptr slots fall through to the base type).
static PySequenceMethods THPSize_as_sequence = {
    nullptr, /* sq_length */
    wrap_tuple_fn<decltype(&sq_concat), &sq_concat>,
    wrap_tuple_fn<decltype(&sq_repeat), &sq_repeat>,
    nullptr, /* sq_item */
    nullptr, /* sq_slice */
    nullptr, /* sq_ass_item */
    nullptr, /* sq_ass_slice */
    nullptr /* sq_contains */
};
// Mapping protocol: override subscript so slices of a torch.Size are
// themselves torch.Size instances.
static PyMappingMethods THPSize_as_mapping = {
    nullptr, /* mp_length */
    wrap_tuple_fn<decltype(&mp_subscript), &mp_subscript>,
    nullptr /* mp_ass_subscript */
};
// size.numel(): product of all dimensions, returned as a Python int.
// NOTE(review): elements are unpacked with THPUtils_unpackLong, so this
// assumes no symbolic elements — confirm callers guard that case.
static PyObject* THPSize_numel(PyObject* _self, PyObject* noargs) {
  HANDLE_TH_ERRORS
  auto self = (THPSize*)_self;
  int64_t numel = 1;
  for (Py_ssize_t i = 0; i < PyTuple_Size((PyObject*)self); ++i) {
    numel *= THPUtils_unpackLong(PyTuple_GET_ITEM(self, i));
  }
  return THPUtils_packInt64(numel);
  END_HANDLE_TH_ERRORS
}
// size.__reduce__(): pickle support. Returns (THPSizeType, (dims,)),
// i.e. the class plus a plain tuple of the dimensions, so unpickling
// calls torch.Size(dims).
static PyObject* THPSize_reduce(PyObject* _self, PyObject* noargs) {
  HANDLE_TH_ERRORS
  auto self = (THPSize*)_self;
  auto ret = THPObjectPtr{PyTuple_New(2)};
  if (!ret) {
    throw python_error();
  }

  auto obj = (PyObject*)(&THPSizeType);
  Py_INCREF(&THPSizeType);
  PyTuple_SET_ITEM(ret.get(), 0, obj);

  THPObjectPtr t(PyTuple_New(PyTuple_Size((PyObject*)self)));
  if (!t) {
    throw python_error();
  }
  for (Py_ssize_t i = 0; i < PyTuple_Size((PyObject*)self); ++i) {
    auto d = PyTuple_GET_ITEM(self, i);
    // SET_ITEM steals a reference; GET_ITEM borrowed one, so bump it.
    Py_INCREF(d);
    PyTuple_SET_ITEM(t.get(), i, d);
  }

  THPObjectPtr dims(Py_BuildValue("(O)", t.get()));
  if (!dims) {
    throw python_error();
  }
  PyTuple_SET_ITEM(ret.get(), 1, dims.release());

  return ret.release();
  END_HANDLE_TH_ERRORS
}
// Method table for torch.Size; must end with a null sentinel entry.
static PyMethodDef THPSize_methods[] = {
    {"numel", THPSize_numel, METH_NOARGS, nullptr},
    {"__reduce__", THPSize_reduce, METH_NOARGS, nullptr},
    {nullptr}};
// Type object for torch.Size: a subclass of tuple (tp_base) with a
// custom repr, size-preserving sequence/mapping slots, and validating
// construction via THPSize_pynew.
PyTypeObject THPSizeType = {
    PyVarObject_HEAD_INIT(nullptr, 0) "torch.Size", /* tp_name */
    sizeof(THPSize), /* tp_basicsize */
    0, /* tp_itemsize */
    nullptr, /* tp_dealloc */
    0, /* tp_vectorcall_offset */
    nullptr, /* tp_getattr */
    nullptr, /* tp_setattr */
    nullptr, /* tp_reserved */
    (reprfunc)THPSize_repr, /* tp_repr */
    nullptr, /* tp_as_number */
    &THPSize_as_sequence, /* tp_as_sequence */
    &THPSize_as_mapping, /* tp_as_mapping */
    nullptr, /* tp_hash */
    nullptr, /* tp_call */
    nullptr, /* tp_str */
    nullptr, /* tp_getattro */
    nullptr, /* tp_setattro */
    nullptr, /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */
    nullptr, /* tp_doc */
    nullptr, /* tp_traverse */
    nullptr, /* tp_clear */
    nullptr, /* tp_richcompare */
    0, /* tp_weaklistoffset */
    nullptr, /* tp_iter */
    nullptr, /* tp_iternext */
    THPSize_methods, /* tp_methods */
    nullptr, /* tp_members */
    nullptr, /* tp_getset */
    &PyTuple_Type, /* tp_base */
    nullptr, /* tp_dict */
    nullptr, /* tp_descr_get */
    nullptr, /* tp_descr_set */
    0, /* tp_dictoffset */
    nullptr, /* tp_init */
    nullptr, /* tp_alloc */
    THPSize_pynew, /* tp_new */
};
// Readies THPSizeType and exposes it on `module` as "Size".
// Throws python_error if type initialization or registration fails.
void THPSize_init(PyObject* module) {
  if (PyType_Ready(&THPSizeType) < 0) {
    throw python_error();
  }
  // PyModule_AddObject steals a reference on success, so add one first.
  Py_INCREF(&THPSizeType);
  if (PyModule_AddObject(module, "Size", (PyObject*)&THPSizeType) < 0) {
    throw python_error();
  }
}