/*
    pybind11/eigen/matrix.h: Transparent conversion for dense and sparse Eigen matrices

    Copyright (c) 2016 Wenzel Jakob <wenzel.jakob@epfl.ch>

    All rights reserved. Use of this source code is governed by a
    BSD-style license that can be found in the LICENSE file.
*/

#pragma once

#include "../numpy.h"
#include "common.h"

/* HINT: To suppress warnings originating from the Eigen headers, use -isystem.
   See also:
       https://stackoverflow.com/questions/2579576/i-dir-vs-isystem-dir
       https://stackoverflow.com/questions/1741816/isystem-for-ms-visual-studio-c-compiler
*/

PYBIND11_WARNING_PUSH
PYBIND11_WARNING_DISABLE_MSVC(5054) // https://github.com/pybind/pybind11/pull/3741
// C5054: operator '&': deprecated between enumerations of different types
#if defined(__MINGW32__)
PYBIND11_WARNING_DISABLE_GCC("-Wmaybe-uninitialized")
#endif

#include <Eigen/Core>
#include <Eigen/SparseCore>

PYBIND11_WARNING_POP
// Eigen prior to 3.2.7 doesn't have proper move constructors--but worse, some classes get implicit
// move constructors that break things. We could detect this and explicitly copy, but an extra copy
// of matrices seems highly undesirable.
static_assert(EIGEN_VERSION_AT_LEAST(3, 2, 7),
              "Eigen matrix support in pybind11 requires Eigen >= 3.2.7");
PYBIND11_NAMESPACE_BEGIN(PYBIND11_NAMESPACE)

PYBIND11_WARNING_DISABLE_MSVC(4127)

// Provide a convenience alias for easier pass-by-ref usage with fully dynamic strides:
using EigenDStride = Eigen::Stride<Eigen::Dynamic, Eigen::Dynamic>;
template <typename MatrixType>
using EigenDRef = Eigen::Ref<MatrixType, 0, EigenDStride>;
template <typename MatrixType>
using EigenDMap = Eigen::Map<MatrixType, 0, EigenDStride>;
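// Example (illustrative; assumes a py::module_ `m` inside PYBIND11_MODULE): taking an argument as
// py::EigenDRef<const Eigen::MatrixXd> accepts NumPy arrays with arbitrary strides (slices,
// transposes, ...) by reference, whereas a plain Eigen::Ref<const Eigen::MatrixXd> requires unit
// inner stride and may therefore force a converting copy:
//
//     m.def("trace", [](const py::EigenDRef<const Eigen::MatrixXd> &x) { return x.trace(); });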
PYBIND11_NAMESPACE_BEGIN(detail)

#if EIGEN_VERSION_AT_LEAST(3, 3, 0)
using EigenIndex = Eigen::Index;
template <typename Scalar, int Flags, typename StorageIndex>
using EigenMapSparseMatrix = Eigen::Map<Eigen::SparseMatrix<Scalar, Flags, StorageIndex>>;
#else
using EigenIndex = EIGEN_DEFAULT_DENSE_INDEX_TYPE;
template <typename Scalar, int Flags, typename StorageIndex>
using EigenMapSparseMatrix = Eigen::MappedSparseMatrix<Scalar, Flags, StorageIndex>;
#endif
// Matches Eigen::Map, Eigen::Ref, blocks, etc:
template <typename T>
using is_eigen_dense_map = all_of<is_template_base_of<Eigen::DenseBase, T>,
                                  std::is_base_of<Eigen::MapBase<T, Eigen::ReadOnlyAccessors>, T>>;
template <typename T>
using is_eigen_mutable_map = std::is_base_of<Eigen::MapBase<T, Eigen::WriteAccessors>, T>;
template <typename T>
using is_eigen_dense_plain
    = all_of<negation<is_eigen_dense_map<T>>, is_template_base_of<Eigen::PlainObjectBase, T>>;
template <typename T>
using is_eigen_sparse = is_template_base_of<Eigen::SparseMatrixBase, T>;
// Test for objects inheriting from EigenBase<Derived> that aren't captured by the above. This
// basically covers anything that can be assigned to a dense matrix but that doesn't have a typical
// matrix data layout that can be copied from its .data(). For example, DiagonalMatrix and
// SelfAdjointView fall into this category.
template <typename T>
using is_eigen_other
    = all_of<is_template_base_of<Eigen::EigenBase, T>,
             negation<any_of<is_eigen_dense_map<T>, is_eigen_dense_plain<T>, is_eigen_sparse<T>>>>;
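// For orientation (illustrative, not exhaustive): is_eigen_dense_plain matches plain storage types
// such as Eigen::MatrixXd or Eigen::Vector3f; is_eigen_dense_map matches Eigen::Map, Eigen::Ref
// and direct-access block expressions; is_eigen_sparse matches Eigen::SparseMatrix; everything
// else deriving from EigenBase (e.g. the DiagonalMatrix / SelfAdjointView cases mentioned above)
// falls under is_eigen_other.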
// Captures numpy/eigen conformability status (returned by EigenProps::conformable()):
template <bool EigenRowMajor>
struct EigenConformable {
    bool conformable = false;
    EigenIndex rows = 0, cols = 0;
    EigenDStride stride{0, 0};    // Only valid if negativestrides is false!
    bool negativestrides = false; // If true, do not use stride!

    // NOLINTNEXTLINE(google-explicit-constructor)
    EigenConformable(bool fits = false) : conformable{fits} {}
    // Matrix type:
    EigenConformable(EigenIndex r, EigenIndex c, EigenIndex rstride, EigenIndex cstride)
        : conformable{true}, rows{r}, cols{c},
          // TODO: when Eigen bug #747 is fixed, remove the tests for non-negativity.
          // http://eigen.tuxfamily.org/bz/show_bug.cgi?id=747
          stride{EigenRowMajor ? (rstride > 0 ? rstride : 0)
                               : (cstride > 0 ? cstride : 0) /* outer stride */,
                 EigenRowMajor ? (cstride > 0 ? cstride : 0)
                               : (rstride > 0 ? rstride : 0) /* inner stride */},
          negativestrides{rstride < 0 || cstride < 0} {}
    // Vector type:
    EigenConformable(EigenIndex r, EigenIndex c, EigenIndex stride)
        : EigenConformable(r, c, r == 1 ? c * stride : stride, c == 1 ? r : r * stride) {}

    template <typename props>
    bool stride_compatible() const {
        // To have compatible strides, we need (on both dimensions) one of fully dynamic strides,
        // matching strides, or a dimension size of 1 (in which case the stride value is
        // irrelevant). Alternatively, if any dimension size is 0, the strides are not relevant
        // (and numpy ≥ 1.23 sets the strides to 0 in that case, so we need to check explicitly).
        if (negativestrides) {
            return false;
        }
        if (rows == 0 || cols == 0) {
            return true;
        }
        return (props::inner_stride == Eigen::Dynamic || props::inner_stride == stride.inner()
                || (EigenRowMajor ? cols : rows) == 1)
               && (props::outer_stride == Eigen::Dynamic || props::outer_stride == stride.outer()
                   || (EigenRowMajor ? rows : cols) == 1);
    }

    // NOLINTNEXTLINE(google-explicit-constructor)
    operator bool() const { return conformable; }
};
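// Worked example (illustrative): a C-contiguous float64 array of shape (3, 4) has numpy strides
// of (32, 8) bytes, i.e. rstride = 4 and cstride = 1 in element units. For a row-major Eigen type
// this produces {rows = 3, cols = 4, outer stride = 4, inner stride = 1}; for a column-major type
// the same array yields outer stride 1 and inner stride 4, which passes stride_compatible() only
// against dynamic (or exactly matching) compile-time strides.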
template <typename Type>
struct eigen_extract_stride {
    using type = Type;
};
template <typename PlainObjectType, int MapOptions, typename StrideType>
struct eigen_extract_stride<Eigen::Map<PlainObjectType, MapOptions, StrideType>> {
    using type = StrideType;
};
template <typename PlainObjectType, int Options, typename StrideType>
struct eigen_extract_stride<Eigen::Ref<PlainObjectType, Options, StrideType>> {
    using type = StrideType;
};
// Helper struct for extracting information from an Eigen type
template <typename Type_>
struct EigenProps {
    using Type = Type_;
    using Scalar = typename Type::Scalar;
    using StrideType = typename eigen_extract_stride<Type>::type;
    static constexpr EigenIndex rows = Type::RowsAtCompileTime, cols = Type::ColsAtCompileTime,
                                size = Type::SizeAtCompileTime;
    static constexpr bool row_major = Type::IsRowMajor,
                          vector
                          = Type::IsVectorAtCompileTime, // At least one dimension has fixed size 1
                          fixed_rows = rows != Eigen::Dynamic, fixed_cols = cols != Eigen::Dynamic,
                          fixed = size != Eigen::Dynamic,       // Fully-fixed size
                          dynamic = !fixed_rows && !fixed_cols; // Fully-dynamic size

    template <EigenIndex i, EigenIndex ifzero>
    using if_zero = std::integral_constant<EigenIndex, i == 0 ? ifzero : i>;
    static constexpr EigenIndex inner_stride
        = if_zero<StrideType::InnerStrideAtCompileTime, 1>::value,
        outer_stride
        = if_zero<StrideType::OuterStrideAtCompileTime,
                  vector ? size : (row_major ? cols : rows)>::value;
    static constexpr bool dynamic_stride
        = inner_stride == Eigen::Dynamic && outer_stride == Eigen::Dynamic;
    static constexpr bool requires_row_major
        = !dynamic_stride && !vector && (row_major ? inner_stride : outer_stride) == 1;
    static constexpr bool requires_col_major
        = !dynamic_stride && !vector && (row_major ? outer_stride : inner_stride) == 1;

    // Takes an input array and determines whether we can make it fit into the Eigen type. If
    // the array is a vector, we attempt to fit it into either an Eigen 1xN or Nx1 vector
    // (preferring the latter if it will fit in either, i.e. for a fully dynamic matrix type).
    static EigenConformable<row_major> conformable(const array &a) {
        const auto dims = a.ndim();
        if (dims < 1 || dims > 2) {
            return false;
        }
        if (dims == 2) { // Matrix type: require exact match (or dynamic)
            EigenIndex np_rows = a.shape(0), np_cols = a.shape(1),
                       np_rstride = a.strides(0) / static_cast<ssize_t>(sizeof(Scalar)),
                       np_cstride = a.strides(1) / static_cast<ssize_t>(sizeof(Scalar));
            if ((fixed_rows && np_rows != rows) || (fixed_cols && np_cols != cols)) {
                return false;
            }
            return {np_rows, np_cols, np_rstride, np_cstride};
        }
        // Otherwise we're storing an n-vector. Only one of the strides will be used, but
        // whichever is used, we want the (single) numpy stride value.
        const EigenIndex n = a.shape(0),
                         stride = a.strides(0) / static_cast<ssize_t>(sizeof(Scalar));
        if (vector) { // Eigen type is a compile-time vector
            if (fixed && size != n) {
                return false; // Vector size mismatch
            }
            return {rows == 1 ? 1 : n, cols == 1 ? 1 : n, stride};
        }
        if (fixed) {
            // The type has a fixed size, but is not a vector: abort
            return false;
        }
        if (fixed_cols) {
            // Since this isn't a vector, cols must be != 1. We allow this only if it exactly
            // equals the number of elements (rows is Dynamic, and so 1 row is allowed).
            if (cols != n) {
                return false;
            }
            return {1, n, stride};
        }
        // Otherwise it's either fully dynamic, or column dynamic; both become a column vector
        if (fixed_rows && rows != n) {
            return false;
        }
        return {n, 1, stride};
    }

    static constexpr bool show_writeable
        = is_eigen_dense_map<Type>::value && is_eigen_mutable_map<Type>::value;
    static constexpr bool show_order = is_eigen_dense_map<Type>::value;
    static constexpr bool show_c_contiguous = show_order && requires_row_major;
    static constexpr bool show_f_contiguous
        = !show_c_contiguous && show_order && requires_col_major;
    static constexpr auto descriptor
        = const_name("numpy.ndarray[") + npy_format_descriptor<Scalar>::name + const_name("[")
          + const_name<fixed_rows>(const_name<(size_t) rows>(), const_name("m")) + const_name(", ")
          + const_name<fixed_cols>(const_name<(size_t) cols>(), const_name("n")) + const_name("]")
          +
          // For a reference type (e.g. Ref<MatrixXd>) we have other constraints that might need to
          // be satisfied: writeable=True (for a mutable reference), and, depending on the map's
          // stride options, possibly f_contiguous or c_contiguous. We include them in the
          // descriptor output to provide some hint as to why a TypeError is occurring (otherwise
          // it can be confusing to see that a function accepts a 'numpy.ndarray[float64[3,2]]' and
          // an error message that you *gave* a numpy.ndarray of the right type and dimensions).
          const_name<show_writeable>(", flags.writeable", "")
          + const_name<show_c_contiguous>(", flags.c_contiguous", "")
          + const_name<show_f_contiguous>(", flags.f_contiguous", "") + const_name("]");
};
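// For illustration, two concrete renderings of the descriptor above (assuming the default Eigen
// stride types): EigenProps<Eigen::Ref<const Eigen::MatrixXf>> yields
// "numpy.ndarray[float32[m, n], flags.f_contiguous]", while EigenProps<EigenDRef<Eigen::MatrixXd>>
// (fully dynamic strides, mutable) yields "numpy.ndarray[float64[m, n], flags.writeable]".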
// Casts an Eigen type to numpy array. If given a base, the numpy array references the src data,
// otherwise it'll make a copy. writeable lets you turn off the writeable flag for the array.
template <typename props>
handle
eigen_array_cast(typename props::Type const &src, handle base = handle(), bool writeable = true) {
    constexpr ssize_t elem_size = sizeof(typename props::Scalar);
    array a;
    if (props::vector) {
        a = array({src.size()}, {elem_size * src.innerStride()}, src.data(), base);
    } else {
        a = array({src.rows(), src.cols()},
                  {elem_size * src.rowStride(), elem_size * src.colStride()},
                  src.data(),
                  base);
    }
    if (!writeable) {
        array_proxy(a.ptr())->flags &= ~detail::npy_api::NPY_ARRAY_WRITEABLE_;
    }
    return a.release();
}
// Takes an lvalue ref to some Eigen type and a (python) base object, creating a numpy array that
// references the Eigen object's data with `base` as the python-registered base class (if omitted,
// the base will be set to None, and lifetime management is up to the caller). The numpy array is
// non-writeable if the given type is const.
template <typename props, typename Type>
handle eigen_ref_array(Type &src, handle parent = none()) {
    // none here is to get past array's should-we-copy detection, which currently always
    // copies when there is no base. Setting the base to None should be harmless.
    return eigen_array_cast<props>(src, parent, !std::is_const<Type>::value);
}

// Takes a pointer to some dense, plain Eigen type, builds a capsule around it, then returns a
// numpy array that references the encapsulated data with a python-side reference to the capsule to
// tie its destruction to that of any dependent python objects. Const-ness is determined by
// whether or not the Type of the pointer given is const.
template <typename props, typename Type, typename = enable_if_t<is_eigen_dense_plain<Type>::value>>
handle eigen_encapsulate(Type *src) {
    capsule base(src, [](void *o) { delete static_cast<Type *>(o); });
    return eigen_ref_array<props>(*src, base);
}
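// In practice (illustrative sketch; assumes a py::module_ `m`): when a bound function returns an
// Eigen::MatrixXd by value, the caster below moves it to the heap and wraps it with
// eigen_encapsulate, so the returned numpy array owns the matrix through the capsule and the
// element data is never copied:
//
//     m.def("zeros", [](int n) { Eigen::MatrixXd z = Eigen::MatrixXd::Zero(n, n); return z; });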
// Type caster for regular, dense matrix types (e.g. MatrixXd), but not maps/refs/etc. of dense
// types.
template <typename Type>
struct type_caster<Type, enable_if_t<is_eigen_dense_plain<Type>::value>> {
    using Scalar = typename Type::Scalar;
    static_assert(!std::is_pointer<Scalar>::value,
                  PYBIND11_EIGEN_MESSAGE_POINTER_TYPES_ARE_NOT_SUPPORTED);
    using props = EigenProps<Type>;

    bool load(handle src, bool convert) {
        // If we're in no-convert mode, only load if given an array of the correct type
        if (!convert && !isinstance<array_t<Scalar>>(src)) {
            return false;
        }
        // Coerce into an array, but don't do type conversion yet; the copy below handles it.
        auto buf = array::ensure(src);
        if (!buf) {
            return false;
        }
        auto dims = buf.ndim();
        if (dims < 1 || dims > 2) {
            return false;
        }
        auto fits = props::conformable(buf);
        if (!fits) {
            return false;
        }
        // Allocate the new type, then build a numpy reference into it
        value = Type(fits.rows, fits.cols);
        auto ref = reinterpret_steal<array>(eigen_ref_array<props>(value));
        if (dims == 1) {
            ref = ref.squeeze();
        } else if (ref.ndim() == 1) {
            buf = buf.squeeze();
        }
        int result = detail::npy_api::get().PyArray_CopyInto_(ref.ptr(), buf.ptr());
        if (result < 0) { // Copy failed!
            PyErr_Clear();
            return false;
        }
        return true;
    }

private:
    // Cast implementation
    template <typename CType>
    static handle cast_impl(CType *src, return_value_policy policy, handle parent) {
        switch (policy) {
            case return_value_policy::take_ownership:
            case return_value_policy::automatic:
                return eigen_encapsulate<props>(src);
            case return_value_policy::move:
                return eigen_encapsulate<props>(new CType(std::move(*src)));
            case return_value_policy::copy:
                return eigen_array_cast<props>(*src);
            case return_value_policy::reference:
            case return_value_policy::automatic_reference:
                return eigen_ref_array<props>(*src);
            case return_value_policy::reference_internal:
                return eigen_ref_array<props>(*src, parent);
            default:
                throw cast_error("unhandled return_value_policy: should not happen!");
        }
    }

public:
    // Normal returned non-reference, non-const value:
    static handle cast(Type &&src, return_value_policy /* policy */, handle parent) {
        return cast_impl(&src, return_value_policy::move, parent);
    }
    // If you return a non-reference const, we mark the numpy array readonly:
    static handle cast(const Type &&src, return_value_policy /* policy */, handle parent) {
        return cast_impl(&src, return_value_policy::move, parent);
    }
    // lvalue reference return; default (automatic) becomes copy
    static handle cast(Type &src, return_value_policy policy, handle parent) {
        if (policy == return_value_policy::automatic
            || policy == return_value_policy::automatic_reference) {
            policy = return_value_policy::copy;
        }
        return cast_impl(&src, policy, parent);
    }
    // const lvalue reference return; default (automatic) becomes copy
    static handle cast(const Type &src, return_value_policy policy, handle parent) {
        if (policy == return_value_policy::automatic
            || policy == return_value_policy::automatic_reference) {
            policy = return_value_policy::copy;
        }
        return cast(&src, policy, parent);
    }
    // non-const pointer return
    static handle cast(Type *src, return_value_policy policy, handle parent) {
        return cast_impl(src, policy, parent);
    }
    // const pointer return
    static handle cast(const Type *src, return_value_policy policy, handle parent) {
        return cast_impl(src, policy, parent);
    }

    static constexpr auto name = props::descriptor;

    // NOLINTNEXTLINE(google-explicit-constructor)
    operator Type *() { return &value; }
    // NOLINTNEXTLINE(google-explicit-constructor)
    operator Type &() { return value; }
    // NOLINTNEXTLINE(google-explicit-constructor)
    operator Type &&() && { return std::move(value); }
    template <typename T>
    using cast_op_type = movable_cast_op_type<T>;

private:
    Type value;
};
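// Illustrative sketch (assumes a py::module_ `m` and a hypothetical C++ struct `Holder` with an
// Eigen::MatrixXd member `mat`): returning an lvalue reference under the default (automatic)
// policy produces a copy, while return_value_policy::reference_internal returns a numpy view
// whose base keeps the Holder instance alive:
//
//     py::class_<Holder>(m, "Holder")
//         .def(py::init<>())
//         .def("mat_copy", [](Holder &h) -> Eigen::MatrixXd & { return h.mat; })
//         .def("mat_view", [](Holder &h) -> Eigen::MatrixXd & { return h.mat; },
//              py::return_value_policy::reference_internal);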
// Base class for casting reference/map/block/etc. objects back to python.
template <typename MapType>
struct eigen_map_caster {
    static_assert(!std::is_pointer<typename MapType::Scalar>::value,
                  PYBIND11_EIGEN_MESSAGE_POINTER_TYPES_ARE_NOT_SUPPORTED);

private:
    using props = EigenProps<MapType>;

public:
    // Directly referencing a ref/map's data is a bit dangerous (whatever the map/ref points to has
    // to stay around), but we'll allow it under the assumption that you know what you're doing
    // (and have an appropriate keep_alive in place). We return a numpy array pointing directly at
    // the ref's data. (The numpy array ends up read-only if the ref was to a const matrix type.)
    // Note that this means you need to ensure you don't destroy the object in some other way (e.g.
    // with an appropriate keep_alive, or with a reference to a statically allocated matrix).
    static handle cast(const MapType &src, return_value_policy policy, handle parent) {
        switch (policy) {
            case return_value_policy::copy:
                return eigen_array_cast<props>(src);
            case return_value_policy::reference_internal:
                return eigen_array_cast<props>(src, parent, is_eigen_mutable_map<MapType>::value);
            case return_value_policy::reference:
            case return_value_policy::automatic:
            case return_value_policy::automatic_reference:
                return eigen_array_cast<props>(src, none(), is_eigen_mutable_map<MapType>::value);
            default:
                // move, take_ownership don't make any sense for a ref/map:
                pybind11_fail("Invalid return_value_policy for Eigen Map/Ref/Block type");
        }
    }

    static constexpr auto name = props::descriptor;

    // Explicitly delete these: we do not support python -> C++ conversion for these types (i.e.
    // they can be return types but not bound arguments). We still declare them (explicitly
    // deleted) so that you end up here if you try anyway.
    bool load(handle, bool) = delete;
    operator MapType() = delete;
    template <typename>
    using cast_op_type = MapType;
};
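// Illustrative sketch (assumes a py::module_ `m`): returning a block of a Ref argument produces a
// numpy view into the caller's array, so the input must be kept alive for as long as the view
// exists, e.g. with py::keep_alive:
//
//     m.def("top_rows", [](Eigen::Ref<Eigen::MatrixXd> x, int n) { return x.topRows(n); },
//           py::keep_alive<0, 1>());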
// We can return any map-like object (but can only load Refs, specialized next):
template <typename Type>
struct type_caster<Type, enable_if_t<is_eigen_dense_map<Type>::value>> : eigen_map_caster<Type> {};

// Loader for Ref<...> arguments. See the documentation for info on how to make this work without
// copying (it requires some extra effort in many cases).
template <typename PlainObjectType, typename StrideType>
struct type_caster<
    Eigen::Ref<PlainObjectType, 0, StrideType>,
    enable_if_t<is_eigen_dense_map<Eigen::Ref<PlainObjectType, 0, StrideType>>::value>>
    : public eigen_map_caster<Eigen::Ref<PlainObjectType, 0, StrideType>> {
private:
    using Type = Eigen::Ref<PlainObjectType, 0, StrideType>;
    using props = EigenProps<Type>;
    using Scalar = typename props::Scalar;
    static_assert(!std::is_pointer<Scalar>::value,
                  PYBIND11_EIGEN_MESSAGE_POINTER_TYPES_ARE_NOT_SUPPORTED);
    using MapType = Eigen::Map<PlainObjectType, 0, StrideType>;
    using Array
        = array_t<Scalar,
                  array::forcecast
                      | ((props::row_major ? props::inner_stride : props::outer_stride) == 1
                             ? array::c_style
                             : (props::row_major ? props::outer_stride : props::inner_stride) == 1
                                   ? array::f_style
                                   : 0)>;
    static constexpr bool need_writeable = is_eigen_mutable_map<Type>::value;
    // Delay construction (these have no default constructor)
    std::unique_ptr<MapType> map;
    std::unique_ptr<Type> ref;
    // Our array. When possible, this is just a numpy array pointing to the source data, but
    // sometimes we can't avoid copying (e.g. input is not a numpy array at all, has an
    // incompatible layout, or is an array of a type that needs to be converted). Using a numpy
    // temporary (rather than an Eigen temporary) saves an extra copy when we need both type
    // conversion and storage order conversion. (Note that we refuse to use this temporary copy
    // when loading an argument for a Ref<M> with M non-const, i.e. a read-write reference).
    Array copy_or_ref;

public:
    bool load(handle src, bool convert) {
        // First check whether what we have is already an array of the right type. If not, we
        // can't avoid a copy (because the copy is also going to do type conversion).
        bool need_copy = !isinstance<Array>(src);
        EigenConformable<props::row_major> fits;
        if (!need_copy) {
            // We don't need a converting copy, but we also need to check whether the strides are
            // compatible with the Ref's stride requirements
            auto aref = reinterpret_borrow<Array>(src);
            if (aref && (!need_writeable || aref.writeable())) {
                fits = props::conformable(aref);
                if (!fits) {
                    return false; // Incompatible dimensions
                }
                if (!fits.template stride_compatible<props>()) {
                    need_copy = true;
                } else {
                    copy_or_ref = std::move(aref);
                }
            } else {
                need_copy = true;
            }
        }
        if (need_copy) {
            // We need to copy: if we need a mutable reference, or we're not supposed to convert
            // (either because we're in the no-convert overload pass, or because we're explicitly
            // instructed not to copy via `py::arg().noconvert()`), we have to fail loading.
            if (!convert || need_writeable) {
                return false;
            }
            Array copy = Array::ensure(src);
            if (!copy) {
                return false;
            }
            fits = props::conformable(copy);
            if (!fits || !fits.template stride_compatible<props>()) {
                return false;
            }
            copy_or_ref = std::move(copy);
            loader_life_support::add_patient(copy_or_ref);
        }
        ref.reset();
        map.reset(new MapType(data(copy_or_ref),
                              fits.rows,
                              fits.cols,
                              make_stride(fits.stride.outer(), fits.stride.inner())));
        ref.reset(new Type(*map));
        return true;
    }

    // NOLINTNEXTLINE(google-explicit-constructor)
    operator Type *() { return ref.get(); }
    // NOLINTNEXTLINE(google-explicit-constructor)
    operator Type &() { return *ref; }
    template <typename _T>
    using cast_op_type = pybind11::detail::cast_op_type<_T>;

private:
    template <typename T = Type, enable_if_t<is_eigen_mutable_map<T>::value, int> = 0>
    Scalar *data(Array &a) {
        return a.mutable_data();
    }
    template <typename T = Type, enable_if_t<!is_eigen_mutable_map<T>::value, int> = 0>
    const Scalar *data(Array &a) {
        return a.data();
    }

    // Attempt to figure out a constructor of `Stride` that will work.
    // If both strides are fixed, use a default constructor:
    template <typename S>
    using stride_ctor_default = bool_constant<S::InnerStrideAtCompileTime != Eigen::Dynamic
                                              && S::OuterStrideAtCompileTime != Eigen::Dynamic
                                              && std::is_default_constructible<S>::value>;
    // Otherwise, if there is a two-index constructor, assume it is (outer,inner) like
    // Eigen::Stride, and use it:
    template <typename S>
    using stride_ctor_dual
        = bool_constant<!stride_ctor_default<S>::value
                        && std::is_constructible<S, EigenIndex, EigenIndex>::value>;
    // Otherwise, if there is a one-index constructor, and just one of the strides is dynamic, use
    // it (passing whichever stride is dynamic).
    template <typename S>
    using stride_ctor_outer
        = bool_constant<!any_of<stride_ctor_default<S>, stride_ctor_dual<S>>::value
                        && S::OuterStrideAtCompileTime == Eigen::Dynamic
                        && S::InnerStrideAtCompileTime != Eigen::Dynamic
                        && std::is_constructible<S, EigenIndex>::value>;
    template <typename S>
    using stride_ctor_inner
        = bool_constant<!any_of<stride_ctor_default<S>, stride_ctor_dual<S>>::value
                        && S::InnerStrideAtCompileTime == Eigen::Dynamic
                        && S::OuterStrideAtCompileTime != Eigen::Dynamic
                        && std::is_constructible<S, EigenIndex>::value>;

    template <typename S = StrideType, enable_if_t<stride_ctor_default<S>::value, int> = 0>
    static S make_stride(EigenIndex, EigenIndex) {
        return S();
    }
    template <typename S = StrideType, enable_if_t<stride_ctor_dual<S>::value, int> = 0>
    static S make_stride(EigenIndex outer, EigenIndex inner) {
        return S(outer, inner);
    }
    template <typename S = StrideType, enable_if_t<stride_ctor_outer<S>::value, int> = 0>
    static S make_stride(EigenIndex outer, EigenIndex) {
        return S(outer);
    }
    template <typename S = StrideType, enable_if_t<stride_ctor_inner<S>::value, int> = 0>
    static S make_stride(EigenIndex, EigenIndex inner) {
        return S(inner);
    }
};
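// Illustrative sketch (assumes a py::module_ `m`): a const Ref argument falls back to a converting
// copy for inputs of the wrong dtype or layout; adding py::arg().noconvert() forbids that
// fallback, so such inputs raise a TypeError instead of being copied silently. A non-const Ref
// never copies and thus already requires a writeable, dtype- and layout-compatible array:
//
//     m.def("vec_sum", [](const Eigen::Ref<const Eigen::VectorXd> &v) { return v.sum(); },
//           py::arg("v").noconvert());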
// type_caster for special matrix types (e.g. DiagonalMatrix), which are EigenBase, but not
// EigenDense (i.e. they don't have a data(), at least not with the usual matrix layout).
// load() is not supported, but we can cast them into the python domain by first copying to a
// regular Eigen::Matrix, then casting that.
template <typename Type>
struct type_caster<Type, enable_if_t<is_eigen_other<Type>::value>> {
    static_assert(!std::is_pointer<typename Type::Scalar>::value,
                  PYBIND11_EIGEN_MESSAGE_POINTER_TYPES_ARE_NOT_SUPPORTED);

protected:
    using Matrix
        = Eigen::Matrix<typename Type::Scalar, Type::RowsAtCompileTime, Type::ColsAtCompileTime>;
    using props = EigenProps<Matrix>;

public:
    static handle cast(const Type &src, return_value_policy /* policy */, handle /* parent */) {
        handle h = eigen_encapsulate<props>(new Matrix(src));
        return h;
    }
    static handle cast(const Type *src, return_value_policy policy, handle parent) {
        return cast(*src, policy, parent);
    }

    static constexpr auto name = props::descriptor;

    // Explicitly delete these: we do not support python -> C++ conversion for these types (i.e.
    // they can be return types but not bound arguments). We still declare them (explicitly
    // deleted) so that you end up here if you try anyway.
    bool load(handle, bool) = delete;
    operator Type() = delete;
    template <typename>
    using cast_op_type = Type;
};
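// For example (illustrative): a bound function returning an Eigen::DiagonalMatrix or a
// SelfAdjointView goes through this caster; the expression is evaluated into a dense Matrix once,
// and the resulting numpy array owns that dense copy.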
template <typename Type>
struct type_caster<Type, enable_if_t<is_eigen_sparse<Type>::value>> {
    using Scalar = typename Type::Scalar;
    static_assert(!std::is_pointer<Scalar>::value,
                  PYBIND11_EIGEN_MESSAGE_POINTER_TYPES_ARE_NOT_SUPPORTED);
    using StorageIndex = remove_reference_t<decltype(*std::declval<Type>().outerIndexPtr())>;
    using Index = typename Type::Index;
    static constexpr bool rowMajor = Type::IsRowMajor;

    bool load(handle src, bool) {
        if (!src) {
            return false;
        }
        auto obj = reinterpret_borrow<object>(src);
        object sparse_module = module_::import("scipy.sparse");
        object matrix_type = sparse_module.attr(rowMajor ? "csr_matrix" : "csc_matrix");
        if (!type::handle_of(obj).is(matrix_type)) {
            try {
                obj = matrix_type(obj);
            } catch (const error_already_set &) {
                return false;
            }
        }
        auto values = array_t<Scalar>((object) obj.attr("data"));
        auto innerIndices = array_t<StorageIndex>((object) obj.attr("indices"));
        auto outerIndices = array_t<StorageIndex>((object) obj.attr("indptr"));
        auto shape = pybind11::tuple((pybind11::object) obj.attr("shape"));
        auto nnz = obj.attr("nnz").cast<Index>();
        if (!values || !innerIndices || !outerIndices) {
            return false;
        }
        value = EigenMapSparseMatrix<Scalar,
                                     Type::Flags & (Eigen::RowMajor | Eigen::ColMajor),
                                     StorageIndex>(shape[0].cast<Index>(),
                                                   shape[1].cast<Index>(),
                                                   std::move(nnz),
                                                   outerIndices.mutable_data(),
                                                   innerIndices.mutable_data(),
                                                   values.mutable_data());
        return true;
    }

    static handle cast(const Type &src, return_value_policy /* policy */, handle /* parent */) {
        const_cast<Type &>(src).makeCompressed();
        object matrix_type
            = module_::import("scipy.sparse").attr(rowMajor ? "csr_matrix" : "csc_matrix");
        array data(src.nonZeros(), src.valuePtr());
        array outerIndices((rowMajor ? src.rows() : src.cols()) + 1, src.outerIndexPtr());
        array innerIndices(src.nonZeros(), src.innerIndexPtr());
        return matrix_type(pybind11::make_tuple(
                               std::move(data), std::move(innerIndices), std::move(outerIndices)),
                           pybind11::make_tuple(src.rows(), src.cols()))
            .release();
    }

    PYBIND11_TYPE_CASTER(Type,
                         const_name<(Type::IsRowMajor) != 0>("scipy.sparse.csr_matrix[",
                                                             "scipy.sparse.csc_matrix[")
                             + npy_format_descriptor<Scalar>::name + const_name("]"));
};
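// Illustrative usage (assumes a py::module_ `m`): a function taking or returning
// Eigen::SparseMatrix<double> is exposed in terms of scipy.sparse.csc_matrix (csr_matrix for a
// row-major sparse type); other scipy.sparse formats passed as arguments are converted through
// the csr/csc constructor in load() above:
//
//     m.def("sparse_eye", [](int n) {
//         Eigen::SparseMatrix<double> s(n, n);
//         s.setIdentity();
//         return s;
//     });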
PYBIND11_NAMESPACE_END(detail)
PYBIND11_NAMESPACE_END(PYBIND11_NAMESPACE)