  1. /***************************************************************************
  2. * Copyright (c) Johan Mabille, Sylvain Corlay and Wolf Vollprecht *
  3. * Copyright (c) QuantStack *
  4. * *
  5. * Distributed under the terms of the BSD 3-Clause License. *
  6. * *
  7. * The full license is in the file LICENSE, distributed with this software. *
  8. ****************************************************************************/
  9. #ifndef XTENSOR_SLICE_HPP
  10. #define XTENSOR_SLICE_HPP
  11. #include <cstddef>
  12. #include <map>
  13. #include <type_traits>
  14. #include <utility>
  15. #include <xtl/xtype_traits.hpp>
  16. #include "xstorage.hpp"
  17. #include "xtensor_config.hpp"
  18. #include "xutils.hpp"
  19. #ifndef XTENSOR_CONSTEXPR
  20. #if (defined(_MSC_VER) || __GNUC__ < 8)
  21. #define XTENSOR_CONSTEXPR inline
  22. #define XTENSOR_GLOBAL_CONSTEXPR static const
  23. #else
  24. #define XTENSOR_CONSTEXPR constexpr
  25. #define XTENSOR_GLOBAL_CONSTEXPR constexpr
  26. #endif
  27. #endif
  28. namespace xt
  29. {
  30. /**********************
  31. * xslice declaration *
  32. **********************/
// CRTP base class for all slice types. Provides access to the concrete
// slice type via derived_cast(); protected special members make it
// usable only as a base class.
template <class D>
class xslice
{
public:

    using derived_type = D;

    derived_type& derived_cast() noexcept;
    const derived_type& derived_cast() const noexcept;

protected:

    xslice() = default;
    ~xslice() = default;

    xslice(const xslice&) = default;
    xslice& operator=(const xslice&) = default;

    xslice(xslice&&) = default;
    xslice& operator=(xslice&&) = default;
};

// Detects whether S derives from xslice<S>, i.e. whether S is a slice type.
template <class S>
using is_xslice = std::is_base_of<xslice<S>, S>;

// SFINAE helper: yields R only when E is NOT a slice type.
template <class E, class R = void>
using disable_xslice = typename std::enable_if<!is_xslice<E>::value, R>::type;

// True when at least one of the types E... is a slice type.
template <class... E>
using has_xslice = xtl::disjunction<is_xslice<E>...>;
  54. /**************
  55. * slice tags *
  56. **************/
// Defines a convert() member returning a freshly constructed NAME. The
// tags are stateless, so there is nothing to copy; the member exists to
// mirror the convert() members of the concrete slices so tags and slices
// can be handled uniformly (e.g. from a variant visitor).
#define DEFINE_TAG_CONVERSION(NAME)                 \
    template <class T>                              \
    XTENSOR_CONSTEXPR NAME convert() const noexcept \
    {                                               \
        return NAME();                              \
    }

    // Tag selecting a full dimension (see all()).
    struct xall_tag
    {
        DEFINE_TAG_CONVERSION(xall_tag)
    };

    // Tag inserting a new axis of length one (see newaxis()).
    struct xnewaxis_tag
    {
        DEFINE_TAG_CONVERSION(xnewaxis_tag)
    };

    // Tag expanding to as many all() slices as needed (see ellipsis()).
    struct xellipsis_tag
    {
        DEFINE_TAG_CONVERSION(xellipsis_tag)
    };

#undef DEFINE_TAG_CONVERSION
  76. /**********************
  77. * xrange declaration *
  78. **********************/
// Contiguous unit-step slice over the half-open interval
// [start_val, stop_val). Indices use the signed type T.
template <class T>
class xrange : public xslice<xrange<T>>
{
public:

    using size_type = T;
    using self_type = xrange<T>;

    xrange() = default;
    xrange(size_type start_val, size_type stop_val) noexcept;

    // Implicit conversion to an xrange with a compatible index type.
    template <class S, typename = std::enable_if_t<std::is_convertible<S, T>::value, void>>
    operator xrange<S>() const noexcept;

    // Same as implicit conversion operator but more convenient to call
    // from a variant visitor
    template <class S, typename = std::enable_if_t<std::is_convertible<S, T>::value, void>>
    xrange<S> convert() const noexcept;

    size_type operator()(size_type i) const noexcept;

    size_type size() const noexcept;
    size_type step_size() const noexcept;
    size_type step_size(std::size_t i, std::size_t n = 1) const noexcept;
    size_type revert_index(std::size_t i) const noexcept;

    bool contains(size_type i) const noexcept;

    bool operator==(const self_type& rhs) const noexcept;
    bool operator!=(const self_type& rhs) const noexcept;

private:

    size_type m_start;
    size_type m_size;  // NOTE(review): ctor takes stop_val; presumably stored as a count — confirm in the implementation

    template <class S>
    friend class xrange;
};
  107. /******************************
  108. * xstepped_range declaration *
  109. ******************************/
// Slice over [start_val, stop_val) with an arbitrary (possibly negative)
// step. Indices use the signed type T.
template <class T>
class xstepped_range : public xslice<xstepped_range<T>>
{
public:

    using size_type = T;
    using self_type = xstepped_range<T>;

    xstepped_range() = default;
    xstepped_range(size_type start_val, size_type stop_val, size_type step) noexcept;

    // Implicit conversion to an xstepped_range with a compatible index type.
    template <class S, typename = std::enable_if_t<std::is_convertible<S, T>::value, void>>
    operator xstepped_range<S>() const noexcept;

    // Same as implicit conversion operator but more convenient to call
    // from a variant visitor
    template <class S, typename = std::enable_if_t<std::is_convertible<S, T>::value, void>>
    xstepped_range<S> convert() const noexcept;

    size_type operator()(size_type i) const noexcept;

    size_type size() const noexcept;
    size_type step_size() const noexcept;
    size_type step_size(std::size_t i, std::size_t n = 1) const noexcept;
    size_type revert_index(std::size_t i) const noexcept;

    bool contains(size_type i) const noexcept;

    bool operator==(const self_type& rhs) const noexcept;
    bool operator!=(const self_type& rhs) const noexcept;

private:

    size_type m_start;
    size_type m_size;  // NOTE(review): ctor takes stop_val; presumably stored as a count — confirm in the implementation
    size_type m_step;

    template <class S>
    friend class xstepped_range;
};
  139. /********************
  140. * xall declaration *
  141. ********************/
// Slice selecting every index of a dimension of the given size.
template <class T>
class xall : public xslice<xall<T>>
{
public:

    using size_type = T;
    using self_type = xall<T>;

    xall() = default;
    explicit xall(size_type size) noexcept;

    // Implicit conversion to an xall with a compatible index type.
    template <class S, typename = std::enable_if_t<std::is_convertible<S, T>::value, void>>
    operator xall<S>() const noexcept;

    // Same as implicit conversion operator but more convenient to call
    // from a variant visitor
    template <class S, typename = std::enable_if_t<std::is_convertible<S, T>::value, void>>
    xall<S> convert() const noexcept;

    size_type operator()(size_type i) const noexcept;

    size_type size() const noexcept;
    size_type step_size() const noexcept;
    size_type step_size(std::size_t i, std::size_t n = 1) const noexcept;
    size_type revert_index(std::size_t i) const noexcept;

    bool contains(size_type i) const noexcept;

    bool operator==(const self_type& rhs) const noexcept;
    bool operator!=(const self_type& rhs) const noexcept;

private:

    size_type m_size;
};
  167. /**
  168. * Returns a slice representing a full dimension,
  169. * to be used as an argument of view function.
  170. * @sa view, strided_view
  171. */
  172. inline auto all() noexcept
  173. {
  174. return xall_tag();
  175. }
  176. /**
  177. * Returns a slice representing all remaining dimensions,
  178. * and selecting all in these dimensions. Ellipsis will expand
  179. * to a series of `all()` slices, until the number of slices is
  180. * equal to the number of dimensions of the source array.
  181. *
  182. * Note: ellipsis can only be used in strided_view!
  183. *
  184. * @code{.cpp}
  185. * xarray<double> a = xarray<double>::from_shape({5, 5, 1, 1, 5});
  186. * auto v = xt::strided_view(a, {2, xt::ellipsis(), 2});
  187. * // equivalent to using {2, xt::all(), xt::all(), xt::all(), 2};
  188. * @endcode
  189. *
  190. * @sa strided_view
  191. */
  192. inline auto ellipsis() noexcept
  193. {
  194. return xellipsis_tag();
  195. }
  196. /************************
  197. * xnewaxis declaration *
  198. ************************/
// Slice inserting a new axis of length one; carries no state.
template <class T>
class xnewaxis : public xslice<xnewaxis<T>>
{
public:

    using size_type = T;
    using self_type = xnewaxis<T>;

    xnewaxis() = default;

    // Implicit conversion to an xnewaxis with a compatible index type.
    template <class S, typename = std::enable_if_t<std::is_convertible<S, T>::value, void>>
    operator xnewaxis<S>() const noexcept;

    // Same as implicit conversion operator but more convenient to call
    // from a variant visitor
    template <class S, typename = std::enable_if_t<std::is_convertible<S, T>::value, void>>
    xnewaxis<S> convert() const noexcept;

    size_type operator()(size_type i) const noexcept;

    size_type size() const noexcept;
    size_type step_size() const noexcept;
    size_type step_size(std::size_t i, std::size_t n = 1) const noexcept;
    size_type revert_index(std::size_t i) const noexcept;

    bool contains(size_type i) const noexcept;

    bool operator==(const self_type& rhs) const noexcept;
    bool operator!=(const self_type& rhs) const noexcept;
};
  221. /**
  222. * Returns a slice representing a new axis of length one,
  223. * to be used as an argument of view function.
  224. * @sa view, strided_view
  225. */
  226. inline auto newaxis() noexcept
  227. {
  228. return xnewaxis_tag();
  229. }
  230. /***************************
  231. * xkeep_slice declaration *
  232. ***************************/
template <class T>
class xkeep_slice;

namespace detail
{
    // Trait detecting xkeep_slice specializations.
    template <class T>
    struct is_xkeep_slice : std::false_type
    {
    };

    template <class T>
    struct is_xkeep_slice<xkeep_slice<T>> : std::true_type
    {
    };

    // SFINAE helpers gating xkeep_slice's template constructor (see its
    // declaration below) on whether the argument is itself an xkeep_slice.
    template <class T>
    using disable_xkeep_slice_t = std::enable_if_t<!is_xkeep_slice<std::decay_t<T>>::value, void>;

    template <class T>
    using enable_xkeep_slice_t = std::enable_if_t<is_xkeep_slice<std::decay_t<T>>::value, void>;
}
// Slice keeping an arbitrary, possibly repeated, list of indices along a
// dimension (see keep() below). Non-contiguous: per keep()'s docs it
// cannot be used in xstrided_view.
template <class T>
class xkeep_slice : public xslice<xkeep_slice<T>>
{
public:

    using container_type = svector<T>;
    using size_type = typename container_type::value_type;
    using self_type = xkeep_slice<T>;

    // Constructs from any index container that is not itself an
    // xkeep_slice (so copy/move construction is not hijacked).
    template <class C, typename = detail::disable_xkeep_slice_t<C>>
    explicit xkeep_slice(C& cont);
    explicit xkeep_slice(container_type&& cont);

    template <class S>
    xkeep_slice(std::initializer_list<S> t);

    // Implicit conversion to an xkeep_slice with a compatible index type.
    template <class S, typename = std::enable_if_t<std::is_convertible<S, T>::value, void>>
    operator xkeep_slice<S>() const noexcept;

    // Same as implicit conversion operator but more convenient to call
    // from a variant visitor
    template <class S, typename = std::enable_if_t<std::is_convertible<S, T>::value, void>>
    xkeep_slice<S> convert() const noexcept;

    size_type operator()(size_type i) const noexcept;
    size_type size() const noexcept;

    // Resolves the stored indices against the dimension size s.
    // NOTE(review): inferred from the raw/normalized member pair below —
    // confirm against the implementation.
    void normalize(std::size_t s);

    size_type step_size(std::size_t i, std::size_t n = 1) const noexcept;
    size_type revert_index(std::size_t i) const;

    bool contains(size_type i) const noexcept;

    bool operator==(const self_type& rhs) const noexcept;
    bool operator!=(const self_type& rhs) const noexcept;

private:

    // Default construction is reserved for internal use (e.g. convert()).
    xkeep_slice() = default;

    container_type m_indices;      // presumably the normalized indices (see normalize)
    container_type m_raw_indices;  // indices as originally supplied

    template <class S>
    friend class xkeep_slice;
};
namespace detail
{
    // keep(T&&) overload selection: enabled for containers (non-integral
    // arguments), yielding a slice of the container's value_type.
    template <class T>
    using disable_integral_keep = std::enable_if_t<
        !xtl::is_integral<std::decay_t<T>>::value,
        xkeep_slice<typename std::decay_t<T>::value_type>>;

    // keep(T) overload selection: enabled for a single integral index.
    template <class T, class R>
    using enable_integral_keep = std::enable_if_t<xtl::is_integral<T>::value, xkeep_slice<R>>;
}
  292. /**
  293. * Create a non-contigous slice from a container of indices to keep.
  294. * Note: this slice cannot be used in the xstrided_view!
  295. *
  296. * @code{.cpp}
  297. * xt::xarray<double> a = xt::arange(9);
  298. * a.reshape({3, 3});
  299. * xt::view(a, xt::keep(0, 2); // => {{0, 1, 2}, {6, 7, 8}}
  300. * xt::view(a, xt::keep(1, 1, 1); // => {{3, 4, 5}, {3, 4, 5}, {3, 4, 5}}
  301. * @endcode
  302. *
  303. * @param indices The indices container
  304. * @return instance of xkeep_slice
  305. */
  306. template <class T>
  307. inline detail::disable_integral_keep<T> keep(T&& indices)
  308. {
  309. return xkeep_slice<typename std::decay_t<T>::value_type>(std::forward<T>(indices));
  310. }
  311. template <class R = std::ptrdiff_t, class T>
  312. inline detail::enable_integral_keep<T, R> keep(T i)
  313. {
  314. using slice_type = xkeep_slice<R>;
  315. using container_type = typename slice_type::container_type;
  316. container_type tmp = {static_cast<R>(i)};
  317. return slice_type(std::move(tmp));
  318. }
  319. template <class R = std::ptrdiff_t, class Arg0, class Arg1, class... Args>
  320. inline xkeep_slice<R> keep(Arg0 i0, Arg1 i1, Args... args)
  321. {
  322. using slice_type = xkeep_slice<R>;
  323. using container_type = typename slice_type::container_type;
  324. container_type tmp = {static_cast<R>(i0), static_cast<R>(i1), static_cast<R>(args)...};
  325. return slice_type(std::move(tmp));
  326. }
  327. /***************************
  328. * xdrop_slice declaration *
  329. ***************************/
template <class T>
class xdrop_slice;

namespace detail
{
    // Trait detecting xdrop_slice specializations.
    template <class T>
    struct is_xdrop_slice : std::false_type
    {
    };

    template <class T>
    struct is_xdrop_slice<xdrop_slice<T>> : std::true_type
    {
    };

    // SFINAE helpers gating xdrop_slice's template constructor (see its
    // declaration below) on whether the argument is itself an xdrop_slice.
    template <class T>
    using disable_xdrop_slice_t = std::enable_if_t<!is_xdrop_slice<std::decay_t<T>>::value, void>;

    template <class T>
    using enable_xdrop_slice_t = std::enable_if_t<is_xdrop_slice<std::decay_t<T>>::value, void>;
}
// Slice removing a given list of indices from a dimension and keeping all
// the others (see drop() below). Non-contiguous: per drop()'s docs it
// cannot be used in xstrided_view.
template <class T>
class xdrop_slice : public xslice<xdrop_slice<T>>
{
public:

    using container_type = svector<T>;
    using size_type = typename container_type::value_type;
    using self_type = xdrop_slice<T>;

    // Constructs from any index container that is not itself an
    // xdrop_slice (so copy/move construction is not hijacked).
    template <class C, typename = detail::disable_xdrop_slice_t<C>>
    explicit xdrop_slice(C& cont);
    explicit xdrop_slice(container_type&& cont);

    template <class S>
    xdrop_slice(std::initializer_list<S> t);

    // Implicit conversion to an xdrop_slice with a compatible index type.
    template <class S, typename = std::enable_if_t<std::is_convertible<S, T>::value, void>>
    operator xdrop_slice<S>() const noexcept;

    // Same as implicit conversion operator but more convenient to call
    // from a variant visitor
    template <class S, typename = std::enable_if_t<std::is_convertible<S, T>::value, void>>
    xdrop_slice<S> convert() const noexcept;

    size_type operator()(size_type i) const noexcept;
    size_type size() const noexcept;

    // Resolves the stored indices against the dimension size s.
    // NOTE(review): inferred from the raw/normalized member pair below —
    // confirm against the implementation.
    void normalize(std::size_t s);

    size_type step_size(std::size_t i, std::size_t n = 1) const noexcept;
    size_type revert_index(std::size_t i) const;

    bool contains(size_type i) const noexcept;

    bool operator==(const self_type& rhs) const noexcept;
    bool operator!=(const self_type& rhs) const noexcept;

private:

    // Default construction is reserved for internal use (e.g. convert()).
    xdrop_slice() = default;

    container_type m_indices;              // indices to drop (presumably normalized)
    container_type m_raw_indices;          // indices as originally supplied
    std::map<size_type, size_type> m_inc;  // NOTE(review): lookup used internally — semantics not visible in this header
    size_type m_size;                      // presumably the number of remaining elements — confirm in the implementation

    template <class S>
    friend class xdrop_slice;
};
namespace detail
{
    // drop(T&&) overload selection: enabled for containers (non-integral
    // arguments), yielding a slice of the container's value_type.
    template <class T>
    using disable_integral_drop = std::enable_if_t<
        !xtl::is_integral<std::decay_t<T>>::value,
        xdrop_slice<typename std::decay_t<T>::value_type>>;

    // drop(T) overload selection: enabled for a single integral index.
    template <class T, class R>
    using enable_integral_drop = std::enable_if_t<xtl::is_integral<T>::value, xdrop_slice<R>>;
}
  391. /**
  392. * Create a non-contigous slice from a container of indices to drop.
  393. * Note: this slice cannot be used in the xstrided_view!
  394. *
  395. * @code{.cpp}
  396. * xt::xarray<double> a = xt::arange(9);
  397. * a.reshape({3, 3});
  398. * xt::view(a, xt::drop(0, 2); // => {{3, 4, 5}}
  399. * @endcode
  400. *
  401. * @param indices The container of indices to drop
  402. * @return instance of xdrop_slice
  403. */
  404. template <class T>
  405. inline detail::disable_integral_drop<T> drop(T&& indices)
  406. {
  407. return xdrop_slice<typename std::decay_t<T>::value_type>(std::forward<T>(indices));
  408. }
  409. template <class R = std::ptrdiff_t, class T>
  410. inline detail::enable_integral_drop<T, R> drop(T i)
  411. {
  412. using slice_type = xdrop_slice<R>;
  413. using container_type = typename slice_type::container_type;
  414. container_type tmp = {static_cast<R>(i)};
  415. return slice_type(std::move(tmp));
  416. }
  417. template <class R = std::ptrdiff_t, class Arg0, class Arg1, class... Args>
  418. inline xdrop_slice<R> drop(Arg0 i0, Arg1 i1, Args... args)
  419. {
  420. using slice_type = xdrop_slice<R>;
  421. using container_type = typename slice_type::container_type;
  422. container_type tmp = {static_cast<R>(i0), static_cast<R>(i1), static_cast<R>(args)...};
  423. return slice_type(std::move(tmp));
  424. }
  425. /******************************
  426. * xrange_adaptor declaration *
  427. ******************************/
// Holds the raw (start, stop, step) triple of a range() expression, where
// each of A, B, C is either a concrete arithmetic type or the xtuph
// placeholder. get(size) turns the triple into a concrete slice for a
// dimension of the given size; the overload set below enumerates all 8
// placeholder combinations via SFINAE on whether each bound is integral
// (placeholders are not integral).
template <class A, class B = A, class C = A>
struct xrange_adaptor
{
    xrange_adaptor(A start_val, B stop_val, C step)
        : m_start(start_val)
        , m_stop(stop_val)
        , m_step(step)
    {
    }

    // start, stop and step all specified.
    template <class MI = A, class MA = B, class STEP = C>
    inline std::enable_if_t<
        xtl::is_integral<MI>::value && xtl::is_integral<MA>::value && xtl::is_integral<STEP>::value,
        xstepped_range<std::ptrdiff_t>>
    get(std::size_t size) const
    {
        return get_stepped_range(m_start, m_stop, m_step, size);
    }

    // start omitted: begin at 0 for positive steps, at size - 1 for
    // negative steps.
    template <class MI = A, class MA = B, class STEP = C>
    inline std::enable_if_t<
        !xtl::is_integral<MI>::value && xtl::is_integral<MA>::value && xtl::is_integral<STEP>::value,
        xstepped_range<std::ptrdiff_t>>
    get(std::size_t size) const
    {
        return get_stepped_range(m_step > 0 ? 0 : static_cast<std::ptrdiff_t>(size) - 1, m_stop, m_step, size);
    }

    // stop omitted: run to size for positive steps; for negative steps
    // pass -(size + 1), which get_stepped_range wraps back to -1, the
    // one-before-the-front sentinel.
    template <class MI = A, class MA = B, class STEP = C>
    inline std::enable_if_t<
        xtl::is_integral<MI>::value && !xtl::is_integral<MA>::value && xtl::is_integral<STEP>::value,
        xstepped_range<std::ptrdiff_t>>
    get(std::size_t size) const
    {
        auto sz = static_cast<std::ptrdiff_t>(size);
        return get_stepped_range(m_start, m_step > 0 ? sz : -(sz + 1), m_step, size);
    }

    // step omitted: a unit-step xrange from start to stop.
    template <class MI = A, class MA = B, class STEP = C>
    inline std::enable_if_t<
        xtl::is_integral<MI>::value && xtl::is_integral<MA>::value && !xtl::is_integral<STEP>::value,
        xrange<std::ptrdiff_t>>
    get(std::size_t size) const
    {
        return xrange<std::ptrdiff_t>(normalize(m_start, size), normalize(m_stop, size));
    }

    // only step specified: the whole dimension, walked forward or
    // backward depending on the step's sign.
    template <class MI = A, class MA = B, class STEP = C>
    inline std::enable_if_t<
        !xtl::is_integral<MI>::value && !xtl::is_integral<MA>::value && xtl::is_integral<STEP>::value,
        xstepped_range<std::ptrdiff_t>>
    get(std::size_t size) const
    {
        std::ptrdiff_t start = m_step >= 0 ? 0 : static_cast<std::ptrdiff_t>(size) - 1;
        std::ptrdiff_t stop = m_step >= 0 ? static_cast<std::ptrdiff_t>(size) : -1;
        return xstepped_range<std::ptrdiff_t>(start, stop, m_step);
    }

    // only start specified: unit step up to the end of the dimension.
    template <class MI = A, class MA = B, class STEP = C>
    inline std::enable_if_t<
        xtl::is_integral<MI>::value && !xtl::is_integral<MA>::value && !xtl::is_integral<STEP>::value,
        xrange<std::ptrdiff_t>>
    get(std::size_t size) const
    {
        return xrange<std::ptrdiff_t>(normalize(m_start, size), static_cast<std::ptrdiff_t>(size));
    }

    // only stop specified: unit step from the beginning of the dimension.
    template <class MI = A, class MA = B, class STEP = C>
    inline std::enable_if_t<
        !xtl::is_integral<MI>::value && xtl::is_integral<MA>::value && !xtl::is_integral<STEP>::value,
        xrange<std::ptrdiff_t>>
    get(std::size_t size) const
    {
        return xrange<std::ptrdiff_t>(0, normalize(m_stop, size));
    }

    // nothing specified: the whole dimension (equivalent to all()).
    template <class MI = A, class MA = B, class STEP = C>
    inline std::enable_if_t<
        !xtl::is_integral<MI>::value && !xtl::is_integral<MA>::value && !xtl::is_integral<STEP>::value,
        xall<std::ptrdiff_t>>
    get(std::size_t size) const
    {
        return xall<std::ptrdiff_t>(static_cast<std::ptrdiff_t>(size));
    }

    A start() const
    {
        return m_start;
    }

    B stop() const
    {
        return m_stop;
    }

    C step() const
    {
        return m_step;
    }

private:

    // Wraps a possibly negative index (Python-style, relative to the end)
    // and clamps the result into [0, size].
    static auto normalize(std::ptrdiff_t val, std::size_t ssize)
    {
        std::ptrdiff_t size = static_cast<std::ptrdiff_t>(ssize);
        val = (val >= 0) ? val : val + size;
        return (std::max)(std::ptrdiff_t(0), (std::min)(size, val));
    }

    // Wraps negative bounds and clamps them to the interval valid for the
    // step direction: [0, size] for positive steps, [-1, size - 1] for
    // negative steps (-1 being the one-before-the-front sentinel).
    // (std::min)/(std::max) are parenthesized to dodge Windows' min/max macros.
    static auto
    get_stepped_range(std::ptrdiff_t start, std::ptrdiff_t stop, std::ptrdiff_t step, std::size_t ssize)
    {
        std::ptrdiff_t size = static_cast<std::ptrdiff_t>(ssize);
        start = (start >= 0) ? start : start + size;
        stop = (stop >= 0) ? stop : stop + size;
        if (step > 0)
        {
            start = (std::max)(std::ptrdiff_t(0), (std::min)(size, start));
            stop = (std::max)(std::ptrdiff_t(0), (std::min)(size, stop));
        }
        else
        {
            start = (std::max)(std::ptrdiff_t(-1), (std::min)(size - 1, start));
            stop = (std::max)(std::ptrdiff_t(-1), (std::min)(size - 1, stop));
        }
        return xstepped_range<std::ptrdiff_t>(start, stop, step);
    }

    A m_start;
    B m_stop;
    C m_step;
};
  545. /*******************************
  546. * Placeholders and rangemaker *
  547. *******************************/
namespace placeholders
{
    // xtensor universal placeholder
    struct xtuph
    {
    };

    // Builder for range bounds assembled with operator| (e.g. _r | 1 | _).
    // The Args... pack records, slot by slot, whether a concrete value
    // (std::ptrdiff_t) or a placeholder (xtuph) was supplied; the raw
    // values live in rng.
    template <class... Args>
    struct rangemaker
    {
        std::ptrdiff_t rng[3];  // = { 0, 0, 0 };
    };

    // Tag-dispatched selection between the stored value and a fresh
    // placeholder; the tag is std::is_same<X, xtuph> for the slot's type.
    XTENSOR_CONSTEXPR xtuph get_tuph_or_val(std::ptrdiff_t /*val*/, std::true_type)
    {
        return xtuph();
    }

    XTENSOR_CONSTEXPR std::ptrdiff_t get_tuph_or_val(std::ptrdiff_t val, std::false_type)
    {
        return val;
    }

    // Fully specified builder (three slots): converts to an
    // xrange_adaptor whose bounds are values or placeholders per A/B/C.
    template <class A, class B, class C>
    struct rangemaker<A, B, C>
    {
        XTENSOR_CONSTEXPR operator xrange_adaptor<A, B, C>()
        {
            return xrange_adaptor<A, B, C>(
                {get_tuph_or_val(rng[0], std::is_same<A, xtuph>()),
                 get_tuph_or_val(rng[1], std::is_same<B, xtuph>()),
                 get_tuph_or_val(rng[2], std::is_same<C, xtuph>())}
            );
        }

        std::ptrdiff_t rng[3];  // = { 0, 0, 0 };
    };

    // Two-slot builder: the step is always a placeholder.
    template <class A, class B>
    struct rangemaker<A, B>
    {
        XTENSOR_CONSTEXPR operator xrange_adaptor<A, B, xt::placeholders::xtuph>()
        {
            return xrange_adaptor<A, B, xt::placeholders::xtuph>(
                {get_tuph_or_val(rng[0], std::is_same<A, xtuph>()),
                 get_tuph_or_val(rng[1], std::is_same<B, xtuph>()),
                 xtuph()}
            );
        }

        std::ptrdiff_t rng[3];  // = { 0, 0, 0 };
    };

    // Appends a concrete bound into the next free slot (index = number of
    // slots already filled, i.e. sizeof...(OA)).
    template <class... OA>
    XTENSOR_CONSTEXPR auto operator|(const rangemaker<OA...>& rng, const std::ptrdiff_t& t)
    {
        auto nrng = rangemaker<OA..., std::ptrdiff_t>({rng.rng[0], rng.rng[1], rng.rng[2]});
        nrng.rng[sizeof...(OA)] = t;
        return nrng;
    }

    // Appends a placeholder bound; only the slot's type changes, the
    // stored value is left untouched.
    template <class... OA>
    XTENSOR_CONSTEXPR auto operator|(const rangemaker<OA...>& rng, const xt::placeholders::xtuph& /*t*/)
    {
        auto nrng = rangemaker<OA..., xt::placeholders::xtuph>({rng.rng[0], rng.rng[1], rng.rng[2]});
        return nrng;
    }

    // Shorthand globals: _ (placeholder bound), _r (range builder),
    // _a (all), _n (newaxis), _e (ellipsis).
    XTENSOR_GLOBAL_CONSTEXPR xtuph _{};
    XTENSOR_GLOBAL_CONSTEXPR rangemaker<> _r = rangemaker<>({0, 0, 0});
    XTENSOR_GLOBAL_CONSTEXPR xall_tag _a{};
    XTENSOR_GLOBAL_CONSTEXPR xnewaxis_tag _n{};
    XTENSOR_GLOBAL_CONSTEXPR xellipsis_tag _e{};
}
  612. inline auto xnone()
  613. {
  614. return placeholders::xtuph();
  615. }
namespace detail
{
    // Passes non-integral types (e.g. placeholders) through unchanged.
    template <class T, class E = void>
    struct cast_if_integer
    {
        using type = T;

        type operator()(T t)
        {
            return t;
        }
    };

    // Normalizes every integral type to std::ptrdiff_t so all concrete
    // range bounds share one signed index type.
    template <class T>
    struct cast_if_integer<T, std::enable_if_t<xtl::is_integral<T>::value>>
    {
        using type = std::ptrdiff_t;

        type operator()(T t)
        {
            return static_cast<type>(t);
        }
    };

    template <class T>
    using cast_if_integer_t = typename cast_if_integer<T>::type;
}
  639. /**
  640. * Select a range from start_val to stop_val (excluded).
  641. * You can use the shorthand `_` syntax to select from the start or until the end.
  642. *
  643. * @code{.cpp}
  644. * using namespace xt::placeholders; // to enable _ syntax
  645. *
  646. * range(3, _) // select from index 3 to the end
  647. * range(_, 5) // select from index 0 to 5 (excluded)
  648. * range(_, _) // equivalent to `all()`
  649. * @endcode
  650. *
  651. * @sa view, strided_view
  652. */
  653. template <class A, class B>
  654. inline auto range(A start_val, B stop_val)
  655. {
  656. return xrange_adaptor<detail::cast_if_integer_t<A>, detail::cast_if_integer_t<B>, placeholders::xtuph>(
  657. detail::cast_if_integer<A>{}(start_val),
  658. detail::cast_if_integer<B>{}(stop_val),
  659. placeholders::xtuph()
  660. );
  661. }
  662. /**
  663. * Select a range from start_val to stop_val (excluded) with step
  664. * You can use the shorthand `_` syntax to select from the start or until the end.
  665. *
  666. * @code{.cpp}
  667. * using namespace xt::placeholders; // to enable _ syntax
  668. * range(3, _, 5) // select from index 3 to the end with stepsize 5
  669. * @endcode
  670. *
  671. * @sa view, strided_view
  672. */
  673. template <class A, class B, class C>
  674. inline auto range(A start_val, B stop_val, C step)
  675. {
  676. return xrange_adaptor<detail::cast_if_integer_t<A>, detail::cast_if_integer_t<B>, detail::cast_if_integer_t<C>>(
  677. detail::cast_if_integer<A>{}(start_val),
  678. detail::cast_if_integer<B>{}(stop_val),
  679. detail::cast_if_integer<C>{}(step)
  680. );
  681. }
  682. /******************************************************
  683. * homogeneous get_size for integral types and slices *
  684. ******************************************************/
  685. template <class S>
  686. inline disable_xslice<S, std::size_t> get_size(const S&) noexcept
  687. {
  688. return 1;
  689. }
  690. template <class S>
  691. inline auto get_size(const xslice<S>& slice) noexcept
  692. {
  693. return slice.derived_cast().size();
  694. }
  695. /*******************************************************
  696. * homogeneous step_size for integral types and slices *
  697. *******************************************************/
  698. template <class S>
  699. inline disable_xslice<S, std::size_t> step_size(const S&, std::size_t) noexcept
  700. {
  701. return 0;
  702. }
  703. template <class S>
  704. inline disable_xslice<S, std::size_t> step_size(const S&, std::size_t, std::size_t) noexcept
  705. {
  706. return 0;
  707. }
  708. template <class S>
  709. inline auto step_size(const xslice<S>& slice, std::size_t idx) noexcept
  710. {
  711. return slice.derived_cast().step_size(idx);
  712. }
  713. template <class S>
  714. inline auto step_size(const xslice<S>& slice, std::size_t idx, std::size_t n) noexcept
  715. {
  716. return slice.derived_cast().step_size(idx, n);
  717. }
  718. /*********************************************
  719. * homogeneous value for integral and slices *
  720. *********************************************/
  721. template <class S, class I>
  722. inline disable_xslice<S, std::size_t> value(const S& s, I) noexcept
  723. {
  724. return static_cast<std::size_t>(s);
  725. }
  726. template <class S, class I>
  727. inline auto value(const xslice<S>& slice, I i) noexcept
  728. {
  729. using ST = typename S::size_type;
  730. return slice.derived_cast()(static_cast<ST>(i));
  731. }
  732. /****************************************
  733. * homogeneous get_slice_implementation *
  734. ****************************************/
namespace detail
{
    // Maps a raw slice argument to the concrete slice object used by views.
    // Primary template: handles scalar (integral) indices.
    template <class T>
    struct slice_implementation_getter
    {
        template <class E, class SL>
        inline decltype(auto) operator()(E& e, SL&& slice, std::size_t index) const
        {
            // Dispatch on signedness: signed indices may be negative and
            // must be wrapped relative to the extent of axis `index`.
            return get_slice(e, std::forward<SL>(slice), index, xtl::is_signed<std::decay_t<SL>>());
        }

    private:

        // Unsigned index: forwarded unchanged.
        template <class E, class SL>
        inline decltype(auto) get_slice(E&, SL&& slice, std::size_t, std::false_type) const
        {
            return std::forward<SL>(slice);
        }

        // Signed index: a negative value counts from the end of the axis.
        template <class E, class SL>
        inline decltype(auto) get_slice(E& e, SL&& slice, std::size_t index, std::true_type) const
        {
            using int_type = std::decay_t<SL>;
            return slice < int_type(0) ? slice + static_cast<std::ptrdiff_t>(e.shape(index))
                                       : std::ptrdiff_t(slice);
        }
    };

    // Shared getter for keep/drop slices: these must be normalized against
    // the axis length, which is only known at this point.
    struct keep_drop_getter
    {
        template <class E, class SL>
        inline decltype(auto) operator()(E& e, SL&& slice, std::size_t index) const
        {
            slice.normalize(e.shape()[index]);
            return std::forward<SL>(slice);
        }

        // Const lvalue overload: normalization mutates the slice, so a copy
        // is normalized instead.
        template <class E, class SL>
        inline auto operator()(E& e, const SL& slice, std::size_t index) const
        {
            return this->operator()(e, SL(slice), index);
        }
    };

    template <class T>
    struct slice_implementation_getter<xkeep_slice<T>> : keep_drop_getter
    {
    };

    template <class T>
    struct slice_implementation_getter<xdrop_slice<T>> : keep_drop_getter
    {
    };

    // `all()` tag: materialize an xall spanning the whole axis.
    template <>
    struct slice_implementation_getter<xall_tag>
    {
        template <class E, class SL>
        inline auto operator()(E& e, SL&&, std::size_t index) const
        {
            return xall<typename E::size_type>(e.shape()[index]);
        }
    };

    // `newaxis()` tag: a synthetic unit-length axis, independent of the shape.
    template <>
    struct slice_implementation_getter<xnewaxis_tag>
    {
        template <class E, class SL>
        inline auto operator()(E&, SL&&, std::size_t) const
        {
            return xnewaxis<typename E::size_type>();
        }
    };

    // Range adaptor: resolve missing start/stop/step against the axis length.
    template <class A, class B, class C>
    struct slice_implementation_getter<xrange_adaptor<A, B, C>>
    {
        template <class E, class SL>
        inline auto operator()(E& e, SL&& adaptor, std::size_t index) const
        {
            return adaptor.get(e.shape()[index]);
        }
    };
}
  809. template <class E, class SL>
  810. inline auto get_slice_implementation(E& e, SL&& slice, std::size_t index)
  811. {
  812. detail::slice_implementation_getter<std::decay_t<SL>> getter;
  813. return getter(e, std::forward<SL>(slice), index);
  814. }
  815. /******************************
  816. * homogeneous get_slice_type *
  817. ******************************/
namespace detail
{
    // Computes the concrete slice type produced by get_slice_implementation
    // for a raw slice argument of type SL applied to expression E.
    // Primary template: concrete slices and integers pass through unchanged.
    template <class E, class SL>
    struct get_slice_type_impl
    {
        using type = SL;
    };

    // The `all()` tag resolves to an xall sized by E's size_type.
    template <class E>
    struct get_slice_type_impl<E, xall_tag>
    {
        using type = xall<typename E::size_type>;
    };

    // The `newaxis()` tag resolves to an xnewaxis of E's size_type.
    template <class E>
    struct get_slice_type_impl<E, xnewaxis_tag>
    {
        using type = xnewaxis<typename E::size_type>;
    };

    // A range adaptor resolves to whatever its get() returns once the axis
    // length is known.
    template <class E, class A, class B, class C>
    struct get_slice_type_impl<E, xrange_adaptor<A, B, C>>
    {
        using type = decltype(xrange_adaptor<A, B, C>(A(), B(), C()).get(0));
    };
}

// Public alias: strips the reference from SL before dispatching above.
template <class E, class SL>
using get_slice_type = typename detail::get_slice_type_impl<E, std::remove_reference_t<SL>>::type;
  843. /*************************
  844. * xslice implementation *
  845. *************************/
// CRTP downcast to the concrete slice type (mutable access).
template <class D>
inline auto xslice<D>::derived_cast() noexcept -> derived_type&
{
    return *static_cast<derived_type*>(this);
}

// CRTP downcast to the concrete slice type (const access).
template <class D>
inline auto xslice<D>::derived_cast() const noexcept -> const derived_type&
{
    return *static_cast<const derived_type*>(this);
}
  856. /*************************
  857. * xrange implementation *
  858. *************************/
// Constructs a contiguous [start_val, stop_val) range; an empty range
// (size 0) results when stop_val <= start_val.
template <class T>
inline xrange<T>::xrange(size_type start_val, size_type stop_val) noexcept
    : m_start(start_val)
    , m_size(stop_val > start_val ? stop_val - start_val : 0)
{
}

// Conversion to an xrange with another size type; fields are static_cast.
template <class T>
template <class S, typename>
inline xrange<T>::operator xrange<S>() const noexcept
{
    xrange<S> ret;
    ret.m_start = static_cast<S>(m_start);
    ret.m_size = static_cast<S>(m_size);
    return ret;
}

// Explicit counterpart of the conversion operator above.
template <class T>
template <class S, typename>
inline xrange<S> xrange<T>::convert() const noexcept
{
    return xrange<S>(*this);
}

// Maps the i-th element of the slice to its underlying axis index.
template <class T>
inline auto xrange<T>::operator()(size_type i) const noexcept -> size_type
{
    return m_start + i;
}

// Number of elements selected by the range.
template <class T>
inline auto xrange<T>::size() const noexcept -> size_type
{
    return m_size;
}

// Distance between consecutive selected indices: always 1 for xrange.
template <class T>
inline auto xrange<T>::step_size() const noexcept -> size_type
{
    return 1;
}

// Distance covered by advancing n positions from position i; i is unused
// because the step is uniform.
template <class T>
inline auto xrange<T>::step_size(std::size_t /*i*/, std::size_t n) const noexcept -> size_type
{
    return static_cast<size_type>(n);
}

// Inverse of operator(): maps an axis index back to its slice position.
// Assumes contains(i); no bounds check is performed.
template <class T>
inline auto xrange<T>::revert_index(std::size_t i) const noexcept -> size_type
{
    return i - m_start;
}

// True if axis index i lies inside [m_start, m_start + m_size).
template <class T>
inline bool xrange<T>::contains(size_type i) const noexcept
{
    return i >= m_start && i < m_start + m_size;
}

// Two ranges are equal when both start and size match.
template <class T>
inline bool xrange<T>::operator==(const self_type& rhs) const noexcept
{
    return (m_start == rhs.m_start) && (m_size == rhs.m_size);
}

template <class T>
inline bool xrange<T>::operator!=(const self_type& rhs) const noexcept
{
    return !(*this == rhs);
}
  920. /********************************
  921. * xstepped_range implementation *
  922. ********************************/
// Constructs a [start_val, stop_val) range with the given step.
// The element count is the truncated quotient n / step plus one when the
// division has a remainder and n and step point in the same direction
// (the XOR of the sign tests detects that case). This assumes size_type
// is signed (the `n < 0` test is meaningful) — typically std::ptrdiff_t.
template <class T>
inline xstepped_range<T>::xstepped_range(size_type start_val, size_type stop_val, size_type step) noexcept
    : m_start(start_val)
    , m_size(size_type(0))
    , m_step(step)
{
    size_type n = stop_val - start_val;
    m_size = n / step + (((n < 0) ^ (step > 0)) && (n % step));
}

// Conversion to a stepped range with another size type; fields are static_cast.
template <class T>
template <class S, typename>
inline xstepped_range<T>::operator xstepped_range<S>() const noexcept
{
    xstepped_range<S> ret;
    ret.m_start = static_cast<S>(m_start);
    ret.m_size = static_cast<S>(m_size);
    ret.m_step = static_cast<S>(m_step);
    return ret;
}

// Explicit counterpart of the conversion operator above.
template <class T>
template <class S, typename>
inline xstepped_range<S> xstepped_range<T>::convert() const noexcept
{
    return xstepped_range<S>(*this);
}

// Maps the i-th element of the slice to its underlying axis index.
template <class T>
inline auto xstepped_range<T>::operator()(size_type i) const noexcept -> size_type
{
    return m_start + i * m_step;
}

// Number of elements selected by the range.
template <class T>
inline auto xstepped_range<T>::size() const noexcept -> size_type
{
    return m_size;
}

// Distance between consecutive selected indices.
template <class T>
inline auto xstepped_range<T>::step_size() const noexcept -> size_type
{
    return m_step;
}

// Distance covered by advancing n positions; i is unused because the
// step is uniform.
template <class T>
inline auto xstepped_range<T>::step_size(std::size_t /*i*/, std::size_t n) const noexcept -> size_type
{
    return m_step * static_cast<size_type>(n);
}

// Inverse of operator(). Assumes contains(i); no bounds check performed.
template <class T>
inline auto xstepped_range<T>::revert_index(std::size_t i) const noexcept -> size_type
{
    return (i - m_start) / m_step;
}

// True if i lies in the covered interval and sits exactly on a step.
template <class T>
inline bool xstepped_range<T>::contains(size_type i) const noexcept
{
    return i >= m_start && i < m_start + m_size * m_step && ((i - m_start) % m_step == 0);
}

// Two stepped ranges are equal when start, size and step all match.
template <class T>
inline bool xstepped_range<T>::operator==(const self_type& rhs) const noexcept
{
    return (m_start == rhs.m_start) && (m_size == rhs.m_size) && (m_step == rhs.m_step);
}

template <class T>
inline bool xstepped_range<T>::operator!=(const self_type& rhs) const noexcept
{
    return !(*this == rhs);
}
  988. /***********************
  989. * xall implementation *
  990. ***********************/
// Selects the whole axis of the given length.
template <class T>
inline xall<T>::xall(size_type size) noexcept
    : m_size(size)
{
}

// Conversion to an xall with another size type.
template <class T>
template <class S, typename>
inline xall<T>::operator xall<S>() const noexcept
{
    return xall<S>(static_cast<S>(m_size));
}

// Explicit counterpart of the conversion operator above.
template <class T>
template <class S, typename>
inline xall<S> xall<T>::convert() const noexcept
{
    return xall<S>(*this);
}

// Identity mapping: slice position i is axis index i.
template <class T>
inline auto xall<T>::operator()(size_type i) const noexcept -> size_type
{
    return i;
}

// Number of selected elements: the full axis length.
template <class T>
inline auto xall<T>::size() const noexcept -> size_type
{
    return m_size;
}

// Consecutive selected indices are adjacent.
template <class T>
inline auto xall<T>::step_size() const noexcept -> size_type
{
    return 1;
}

// Advancing n positions covers a distance of n; i is unused.
template <class T>
inline auto xall<T>::step_size(std::size_t /*i*/, std::size_t n) const noexcept -> size_type
{
    return static_cast<size_type>(n);
}

// Identity inverse mapping.
template <class T>
inline auto xall<T>::revert_index(std::size_t i) const noexcept -> size_type
{
    return i;
}

// True for any index within the axis.
template <class T>
inline bool xall<T>::contains(size_type i) const noexcept
{
    return i < m_size;
}

// Two xall slices are equal when their sizes match.
template <class T>
inline bool xall<T>::operator==(const self_type& rhs) const noexcept
{
    return m_size == rhs.m_size;
}

template <class T>
inline bool xall<T>::operator!=(const self_type& rhs) const noexcept
{
    return !(*this == rhs);
}
  1048. /***************************
  1049. * xnewaxis implementation *
  1050. ***************************/
// Conversion to an xnewaxis with another size type (stateless).
template <class T>
template <class S, typename>
inline xnewaxis<T>::operator xnewaxis<S>() const noexcept
{
    return xnewaxis<S>();
}

// Explicit counterpart of the conversion operator above.
template <class T>
template <class S, typename>
inline xnewaxis<S> xnewaxis<T>::convert() const noexcept
{
    return xnewaxis<S>(*this);
}

// The synthetic axis has a single element at index 0.
template <class T>
inline auto xnewaxis<T>::operator()(size_type) const noexcept -> size_type
{
    return 0;
}

// A new axis always has length 1.
template <class T>
inline auto xnewaxis<T>::size() const noexcept -> size_type
{
    return 1;
}

// Moving along a new axis never advances in the underlying data.
template <class T>
inline auto xnewaxis<T>::step_size() const noexcept -> size_type
{
    return 0;
}

template <class T>
inline auto xnewaxis<T>::step_size(std::size_t /*i*/, std::size_t /*n*/) const noexcept -> size_type
{
    return 0;
}

// Identity inverse mapping (only index 0 is valid).
template <class T>
inline auto xnewaxis<T>::revert_index(std::size_t i) const noexcept -> size_type
{
    return i;
}

// Only index 0 exists on a new axis.
template <class T>
inline bool xnewaxis<T>::contains(size_type i) const noexcept
{
    return i == 0;
}

// All xnewaxis instances are stateless, hence always equal.
template <class T>
inline bool xnewaxis<T>::operator==(const self_type& /*rhs*/) const noexcept
{
    return true;
}
  1098. template <class T>
  1099. inline bool xnewaxis<T>::operator!=(const self_type& /*rhs*/) const noexcept
  1100. {
  1101. return true;
  1102. }
  1103. /******************************
  1104. * xkeep_slice implementation *
  1105. ******************************/
// Builds a keep slice from any container of indices (copied element-wise).
template <class T>
template <class C, typename>
inline xkeep_slice<T>::xkeep_slice(C& cont)
    : m_raw_indices(cont.begin(), cont.end())
{
}

// Builds a keep slice by taking ownership of the index container.
template <class T>
inline xkeep_slice<T>::xkeep_slice(container_type&& cont)
    : m_raw_indices(std::move(cont))
{
}

// Builds a keep slice from an initializer list, casting each entry to
// size_type (entries may be negative; they are resolved in normalize()).
template <class T>
template <class S>
inline xkeep_slice<T>::xkeep_slice(std::initializer_list<S> t)
    : m_raw_indices(t.size())
{
    std::transform(
        t.begin(),
        t.end(),
        m_raw_indices.begin(),
        [](auto t)
        {
            return static_cast<size_type>(t);
        }
    );
}

// Conversion to a keep slice with another index type; raw and normalized
// indices are both converted.
// NOTE(review): both target containers are resized to the RAW index count;
// if this slice has not been normalized yet (m_indices empty), ret.m_indices
// ends up zero-filled rather than empty — confirm conversion only happens
// on normalized slices.
template <class T>
template <class S, typename>
inline xkeep_slice<T>::operator xkeep_slice<S>() const noexcept
{
    xkeep_slice<S> ret;
    using us_type = typename container_type::size_type;
    us_type sz = static_cast<us_type>(size());
    ret.m_raw_indices.resize(sz);
    ret.m_indices.resize(sz);
    std::transform(
        m_raw_indices.cbegin(),
        m_raw_indices.cend(),
        ret.m_raw_indices.begin(),
        [](const T& val)
        {
            return static_cast<S>(val);
        }
    );
    std::transform(
        m_indices.cbegin(),
        m_indices.cend(),
        ret.m_indices.begin(),
        [](const T& val)
        {
            return static_cast<S>(val);
        }
    );
    return ret;
}

// Explicit counterpart of the conversion operator above.
template <class T>
template <class S, typename>
inline xkeep_slice<S> xkeep_slice<T>::convert() const noexcept
{
    return xkeep_slice<S>(*this);
}

// Resolves raw indices against the axis length: negative raw indices
// count from the end of the axis.
template <class T>
inline void xkeep_slice<T>::normalize(std::size_t shape)
{
    m_indices.resize(m_raw_indices.size());
    std::size_t sz = m_indices.size();
    for (std::size_t i = 0; i < sz; ++i)
    {
        m_indices[i] = m_raw_indices[i] < 0 ? static_cast<size_type>(shape) + m_raw_indices[i]
                                            : m_raw_indices[i];
    }
}

// Maps slice position i to the kept axis index. A single-index keep slice
// behaves like a broadcast: every position returns the one kept index.
template <class T>
inline auto xkeep_slice<T>::operator()(size_type i) const noexcept -> size_type
{
    return m_indices.size() == size_type(1) ? m_indices.front() : m_indices[static_cast<std::size_t>(i)];
}

// Number of kept indices (based on the raw, pre-normalization list).
template <class T>
inline auto xkeep_slice<T>::size() const noexcept -> size_type
{
    return static_cast<size_type>(m_raw_indices.size());
}

// Distance along the underlying axis from position i to position i + n.
// Past the end, the distance to one past the last kept index is returned;
// a single-index (broadcast) slice never advances and returns 0.
template <class T>
inline auto xkeep_slice<T>::step_size(std::size_t i, std::size_t n) const noexcept -> size_type
{
    if (m_indices.size() == 1)
    {
        return 0;
    }
    if (i + n >= m_indices.size())
    {
        return m_indices.back() - m_indices[i] + 1;
    }
    else
    {
        return m_indices[i + n] - m_indices[i];
    }
}

// Inverse of operator(): finds the slice position of axis index i.
// Throws if i is not among the kept indices.
template <class T>
inline auto xkeep_slice<T>::revert_index(std::size_t i) const -> size_type
{
    auto it = std::find(m_indices.begin(), m_indices.end(), i);
    if (it != m_indices.end())
    {
        return std::distance(m_indices.begin(), it);
    }
    else
    {
        XTENSOR_THROW(std::runtime_error, "Index i (" + std::to_string(i) + ") not in indices of islice.");
    }
}

// True if axis index i is among the (normalized) kept indices.
template <class T>
inline bool xkeep_slice<T>::contains(size_type i) const noexcept
{
    return (std::find(m_indices.begin(), m_indices.end(), i) == m_indices.end()) ? false : true;
}

// Equality compares the normalized index lists only.
template <class T>
inline bool xkeep_slice<T>::operator==(const self_type& rhs) const noexcept
{
    return m_indices == rhs.m_indices;
}

template <class T>
inline bool xkeep_slice<T>::operator!=(const self_type& rhs) const noexcept
{
    return !(*this == rhs);
}
  1232. /******************************
  1233. * xdrop_slice implementation *
  1234. ******************************/
// Builds a drop slice from any container of indices (copied element-wise).
template <class T>
template <class C, typename>
inline xdrop_slice<T>::xdrop_slice(C& cont)
    : m_raw_indices(cont.begin(), cont.end())
{
}

// Builds a drop slice by taking ownership of the index container.
template <class T>
inline xdrop_slice<T>::xdrop_slice(container_type&& cont)
    : m_raw_indices(std::move(cont))
{
}

// Builds a drop slice from an initializer list, casting each entry to
// size_type (entries may be negative; resolved in normalize()).
template <class T>
template <class S>
inline xdrop_slice<T>::xdrop_slice(std::initializer_list<S> t)
    : m_raw_indices(t.size())
{
    std::transform(
        t.begin(),
        t.end(),
        m_raw_indices.begin(),
        [](auto t)
        {
            return static_cast<size_type>(t);
        }
    );
}

// Conversion to a drop slice with another index type; raw indices,
// normalized indices, the increment map and the cached size are all
// converted.
template <class T>
template <class S, typename>
inline xdrop_slice<T>::operator xdrop_slice<S>() const noexcept
{
    xdrop_slice<S> ret;
    ret.m_raw_indices.resize(m_raw_indices.size());
    ret.m_indices.resize(m_indices.size());
    std::transform(
        m_raw_indices.cbegin(),
        m_raw_indices.cend(),
        ret.m_raw_indices.begin(),
        [](const T& val)
        {
            return static_cast<S>(val);
        }
    );
    std::transform(
        m_indices.cbegin(),
        m_indices.cend(),
        ret.m_indices.begin(),
        [](const T& val)
        {
            return static_cast<S>(val);
        }
    );
    std::transform(
        m_inc.cbegin(),
        m_inc.cend(),
        std::inserter(ret.m_inc, ret.m_inc.begin()),
        [](const auto& val)
        {
            return std::make_pair(static_cast<S>(val.first), static_cast<S>(val.second));
        }
    );
    ret.m_size = static_cast<S>(m_size);
    return ret;
}

// Explicit counterpart of the conversion operator above.
template <class T>
template <class S, typename>
inline xdrop_slice<S> xdrop_slice<T>::convert() const noexcept
{
    return xdrop_slice<S>(*this);
}

// Resolves raw indices against the axis length and precomputes the
// offset map used by operator().
template <class T>
inline void xdrop_slice<T>::normalize(std::size_t shape)
{
    // Remaining elements after dropping.
    m_size = static_cast<size_type>(shape - m_raw_indices.size());
    m_indices.resize(m_raw_indices.size());
    std::size_t sz = m_indices.size();
    for (std::size_t i = 0; i < sz; ++i)
    {
        // Negative raw indices count from the end of the axis.
        m_indices[i] = m_raw_indices[i] < 0 ? static_cast<size_type>(shape) + m_raw_indices[i]
                                            : m_raw_indices[i];
    }
    // Build m_inc: for each run of consecutive dropped indices, map the
    // slice position at which the run starts (first dropped axis index
    // minus the drops accumulated before the run) to the cumulative
    // number of dropped indices through that run. Assumes m_indices is
    // sorted ascending.
    size_type cum = size_type(0);
    size_type prev_cum = cum;
    for (std::size_t i = 0; i < sz; ++i)
    {
        std::size_t ind = i;
        size_type d = m_indices[i];
        // Collapse a run of consecutive dropped indices.
        while (i + 1 < sz && m_indices[i + 1] == m_indices[i] + 1)
        {
            ++i;
        }
        cum += (static_cast<size_type>(i) - static_cast<size_type>(ind)) + 1;
        m_inc[d - prev_cum] = cum;
        prev_cum = cum;
    }
}

// Maps slice position i to the underlying axis index: positions before
// the first dropped run map to themselves; later positions are shifted
// by the cumulative drop count of the last run starting at or before i.
template <class T>
inline auto xdrop_slice<T>::operator()(size_type i) const noexcept -> size_type
{
    if (m_inc.empty() || i < m_inc.begin()->first)
    {
        return i;
    }
    else
    {
        auto iter = --m_inc.upper_bound(i);
        return i + iter->second;
    }
}

// Number of elements remaining after the drop.
template <class T>
inline auto xdrop_slice<T>::size() const noexcept -> size_type
{
    return m_size;
}

// Distance along the underlying axis from position i to position i + n;
// past the end, the distance to one past the last kept index is returned.
template <class T>
inline auto xdrop_slice<T>::step_size(std::size_t i, std::size_t n) const noexcept -> size_type
{
    if (i + n >= static_cast<std::size_t>(m_size))
    {
        return (*this)(static_cast<size_type>(m_size - 1)) - (*this)(static_cast<size_type>(i)) + 1;
    }
    else
    {
        return (*this)(static_cast<size_type>(i + n)) - (*this)(static_cast<size_type>(i));
    }
}

// Inverse of operator(): maps a (non-dropped) axis index back to its
// slice position by subtracting the applicable cumulative offset.
// NOTE(review): m_inc.begin() is dereferenced without an emptiness check —
// assumes normalize() recorded at least one dropped run; confirm callers
// never invoke this on an empty drop list (operator() does guard).
template <class T>
inline auto xdrop_slice<T>::revert_index(std::size_t i) const -> size_type
{
    if (i < m_inc.begin()->first)
    {
        return i;
    }
    else
    {
        auto iter = --m_inc.lower_bound(i);
        // If i precedes the axis position where this run's offset starts
        // to apply, step back to the previous run's offset.
        auto check = iter->first + iter->second;
        if (check > i)
        {
            --iter;
        }
        return i - iter->second;
    }
}

// True if axis index i was NOT dropped.
template <class T>
inline bool xdrop_slice<T>::contains(size_type i) const noexcept
{
    return (std::find(m_indices.begin(), m_indices.end(), i) == m_indices.end()) ? true : false;
}

// Equality compares the normalized dropped-index lists only.
template <class T>
inline bool xdrop_slice<T>::operator==(const self_type& rhs) const noexcept
{
    return m_indices == rhs.m_indices;
}

template <class T>
inline bool xdrop_slice<T>::operator!=(const self_type& rhs) const noexcept
{
    return !(*this == rhs);
}
  1393. }
  1394. #undef XTENSOR_CONSTEXPR
  1395. #endif