.. _program_listing_file__tmp_ws_src_proxsuite_include_proxsuite_linalg_veg_vec.hpp:

Program Listing for File vec.hpp
================================

|exhale_lsh| :ref:`Return to documentation for file <file__tmp_ws_src_proxsuite_include_proxsuite_linalg_veg_vec.hpp>` (``/tmp/ws/src/proxsuite/include/proxsuite/linalg/veg/vec.hpp``)

.. |exhale_lsh| unicode:: U+021B0 .. UPWARDS ARROW WITH TIP LEFTWARDS

.. code-block:: cpp

   #ifndef VEG_VECTOR_HPP_QWFSH3ROS
   #define VEG_VECTOR_HPP_QWFSH3ROS

   #include "proxsuite/linalg/veg/internal/delete_special_members.hpp"
   #include "proxsuite/linalg/veg/memory/alloc.hpp"
   #include "proxsuite/linalg/veg/internal/collection_algo.hpp"
   #include "proxsuite/linalg/veg/internal/narrow.hpp"
   #include "proxsuite/linalg/veg/slice.hpp"
   #include "proxsuite/linalg/veg/util/unreachable.hpp"
   #include "proxsuite/linalg/veg/internal/prologue.hpp"

   namespace proxsuite {
   namespace linalg {
   namespace veg {
   namespace _detail {
   namespace _vector {
   namespace adl {
   struct AdlBase
   {};
   } // namespace adl
   } // namespace _vector
   } // namespace _detail

   namespace _detail {
   template<typename T>
   VEG_INLINE constexpr auto
   min2(T a, T b) noexcept -> T
   {
     return (static_cast<T const&>(a) < static_cast<T const&>(b)) ? VEG_FWD(a)
                                                                  : VEG_FWD(b);
   }
   template<typename T>
   VEG_INLINE constexpr auto
   max2(T a, T b) noexcept -> T
   {
     return (static_cast<T const&>(a) < static_cast<T const&>(b)) ? VEG_FWD(b)
                                                                  : VEG_FWD(a);
   }

   namespace _collections {
   VEG_INLINE constexpr auto
   vector_grow_compute(usize current_cap) noexcept -> usize
   {
     return current_cap + current_cap;
   }
   // new_cap must be larger than current_cap
   VEG_INLINE constexpr auto
   vector_grow_choose(usize current_cap, usize new_cap) noexcept -> usize
   {
     return _detail::max2(_collections::vector_grow_compute(current_cap),
                          new_cap);
   }

   template<typename T>
   auto
   relocate(void* out, void const* in, usize nbytes) noexcept -> void*;

   struct relocate_pointer_trivial
   {
     static constexpr void* (*value)(void*, void const*, usize) = &mem::memmove;
   };
   template<typename T, bool = cpo::is_trivially_relocatable<T>::value>
   struct relocate_pointer : relocate_pointer_trivial
   {};
   template<typename T>
   struct relocate_pointer<T, false>
   {
     static constexpr void* (*value)(void*, void const*, usize) =
       _collections::relocate<T>;
   };
   } // namespace _collections
   } // namespace _detail

   namespace collections {
   template<typename T>
   struct relocate_pointer : _detail::_collections::relocate_pointer<T>
   {};
   } // namespace collections

   namespace vector {
   template<typename T>
   struct RawVector
   {
     T* data;
     T* end;
     T* end_alloc;

     VEG_INLINE constexpr auto len() const noexcept -> usize
     {
       return static_cast<usize>(end - data);
     }
     VEG_INLINE constexpr auto cap() const noexcept -> usize
     {
       return static_cast<usize>(end_alloc - data);
     }
   };
   } // namespace vector

   namespace _detail {
   namespace _collections {
   template<bool NoThrow>
   struct CloneImpl;

   template<bool NoThrow, typename T, typename A, typename C>
   struct CloneFn
   {
     RefMut<A> alloc;
     RefMut<C> cloner;
     T const* in;
     VEG_CPP14(constexpr)
     VEG_INLINE auto operator()() VEG_NOEXCEPT_IF(NoThrow) -> T
     {
       return mem::Cloner<C>::clone(cloner, ref(*in), alloc);
     }
   };

   template<>
   struct CloneImpl<true>
   {
     template<typename T, typename A, typename C>
     static VEG_CPP14(constexpr) void fn( //
       RefMut<A> alloc,
       RefMut<C> cloner,
       T* out,
       T* out_end,
       T const* in) VEG_NOEXCEPT
     {
       for (; out < out_end; ++out, ++in) {
         mem::construct_with(out, CloneFn<true, T, A, C>{ alloc, cloner, in });
       }
     }
   };

   template<>
   struct CloneImpl<false>
   {
     template<typename T, typename A, typename C>
     static void fn( //
       RefMut<A> alloc,
       RefMut<C> cloner,
       T* out,
       T* out_end,
       T const* in) VEG_NOEXCEPT_IF(false)
     {
       Defer<Cleanup<T, A, C>> _{ { alloc, cloner, out, out_end } };
       for (; _.fn.ptr < _.fn.ptr_end; ++_.fn.ptr, ++in) {
         mem::construct_with(_.fn.ptr,
                             CloneFn<false, T, A, C>{
                               _.fn.alloc,
                               _.fn.cloner,
                               in,
                             });
       }
     }
   };
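
   // slice_clone dispatches on whether the clone operation can throw: the
   // nothrow path (CloneImpl<true>) is a plain construction loop, while the
   // throwing path (CloneImpl<false>) registers a deferred cleanup that
   // destroys the already-constructed prefix if a clone throws midway.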
   template<typename T, typename A, typename C>
   VEG_CPP14(constexpr)
   void slice_clone(RefMut<A> alloc,
                    RefMut<C> cloner,
                    T* out,
                    T* out_end,
                    T const* in)
     VEG_NOEXCEPT_IF(VEG_CONCEPT(alloc::nothrow_clone<T, A, C>))
   {
     CloneImpl<VEG_CONCEPT(alloc::nothrow_clone<T, A, C>)>::fn(
       alloc, cloner, out, out_end, in);
   }

   template<typename T, typename A, typename C>
   VEG_CPP14(constexpr)
   void slice_clone_from(RefMut<A> alloc,
                         RefMut<C> cloner,
                         T* out,
                         T* out_end,
                         T const* in) VEG_NOEXCEPT_IF(true)
   {
     while (true) {
       if (out == out_end) {
         break;
       }
       mem::Cloner<C>::clone_from( //
         RefMut<C>(cloner), mut(*out), ref(*in), RefMut<A>(alloc));
       ++out;
       ++in;
     }
   }

   template<typename T>
   auto
   relocate(void* out, void const* in, usize nbytes) noexcept -> void*
   {
     T* out_T = static_cast<T*>(out);
     T* in_T = const_cast<T*>(static_cast<T const*>(in));
     usize n = nbytes / sizeof(T);

     for (usize i = 0; i < n; ++i) {
       mem::construct_at(out_T + i, static_cast<T&&>(in_T[i]));
       in_T[i].~T();
     }

     return out;
   }

   template<typename T>
   auto
   relocate_backward(void* out, void const* in, usize nbytes) noexcept -> void*
   {
     T* out_T = static_cast<T*>(out);
     T* in_T = const_cast<T*>(static_cast<T const*>(in));
     usize n = nbytes / sizeof(T);

     for (usize i = 0; i < n; ++i) {
       mem::construct_at(out_T + (n - i - 1), static_cast<T&&>(in_T[n - i - 1]));
       in_T[n - i - 1].~T();
     }

     return out;
   }

   template<typename A>
   struct AllocCleanup
   {
     RefMut<A> alloc;
     void* data;
     mem::Layout layout;

     VEG_INLINE VEG_CPP14(constexpr) void operator()()
       VEG_NOEXCEPT_IF(VEG_CONCEPT(alloc::nothrow_dealloc<A>))
     {
       if (data != nullptr) {
         mem::Alloc<A>::dealloc(
           RefMut<A>(alloc), static_cast<void*>(data), mem::Layout(layout));
       }
     }
   };

   template<typename T, typename A, typename C>
   auto
   alloc_and_copy(RefMut<A> alloc, RefMut<C> cloner, T const* data, usize len)
     VEG_NOEXCEPT_IF(VEG_CONCEPT(alloc::nothrow_alloc<A>) &&
                     VEG_CONCEPT(alloc::nothrow_clone<T, A, C>))
       -> mem::AllocBlock
   {
     mem::AllocBlock block = mem::Alloc<A>::alloc(
       RefMut<A>(alloc), mem::Layout{ sizeof(T) * usize(len), alignof(T) });

     // if copying fails, this takes care of deallocating
     Defer<AllocCleanup<A>> _{ {
       alloc,
       block.data,
       mem::Layout{ block.byte_cap, alignof(T) },
     } };

     // copy construct elements
     _collections::slice_clone(_.fn.alloc,
                               cloner,
                               static_cast<T*>(block.data),
                               static_cast<T*>(block.data) + len,
                               data);
     _.fn.data = nullptr;
     return block;
   }

   template<typename T, typename A, typename C>
   auto
   realloc_and_append( //
     RefMut<A> alloc,
     RefMut<C> cloner,
     mem::AllocBlock out,
     usize out_len,
     T const* in,
     usize in_len) VEG_NOEXCEPT_IF(VEG_CONCEPT(alloc::nothrow_grow<A>) &&
                                   VEG_CONCEPT(alloc::nothrow_clone<T, A, C>))
     -> mem::AllocBlock
   {
     if (in_len == 0) {
       return out;
     }

     if (out.byte_cap >= (in_len + out_len) * sizeof(T)) {
       mem::AllocBlock block = mem::Alloc<A>::grow(
         RefMut<A>(alloc),
         static_cast<void*>(out.data),
         mem::Layout{ out.byte_cap, alignof(T) },
         out_len * sizeof(T),
         mem::RelocFn{ collections::relocate_pointer<T>::value });

       // if copying fails, this takes care of deallocating
       Defer<AllocCleanup<A>> _{ {
         alloc,
         block.data,
         mem::Layout{ block.byte_cap, alignof(T) },
       } };
       // if copying fails, this takes care of destroying
       Defer<Cleanup<T, A, C>> destroy{ {
         _.fn.alloc,
         cloner,
         static_cast<T*>(block.data),
         static_cast<T*>(block.data) + out_len,
       } };

       // copy construct elements
       _collections::slice_clone( //
         destroy.fn.alloc,
         destroy.fn.cloner,
         static_cast<T*>(block.data) + out_len,
         static_cast<T*>(block.data) + in_len,
         in);

       // disable destruction
       destroy.fn.ptr = nullptr;
       destroy.fn.ptr_end = nullptr;
       // disable deallocation
       _.fn.data = nullptr;

       out = block;
     } else {
       // copy construct elements
       _collections::slice_clone( //
         alloc,
         cloner,
         static_cast<T*>(out.data) + out_len,
         static_cast<T*>(out.data) + in_len,
         in);
     }
     return out;
   }
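
   // vector copy assignment is dispatched on mem::Cloner<C>::trivial_clone<T>:
   // the non-trivial path (CloneFromImpl<false>) clone-assigns the common
   // prefix, destroys the excess elements, then appends the remaining ones;
   // the trivial path reallocates or overwrites the raw storage wholesale.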
   template<bool TrivialClone>
   struct CloneFromImpl;

   template<>
   struct CloneFromImpl<false>
   {
     template<typename T, typename A, typename C>
     static void fn(RefMut<A> lhs_alloc,
                    RefMut<C> cloner,
                    vector::RawVector<T>& lhs_raw,
                    Ref<A> rhs_alloc,
                    vector::RawVector<T> const rhs_raw)
       VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_copy_assignable<A>) && //
                       VEG_CONCEPT(alloc::nothrow_alloc<A>) &&    //
                       VEG_CONCEPT(alloc::nothrow_clone<T, A, C>) &&
                       VEG_CONCEPT(alloc::nothrow_clone_from<T, A, C>))
     {
       vector::RawVector<T> lhs_copy = lhs_raw;

       usize rhs_len = (rhs_raw.end - rhs_raw.data);

       if (!(lhs_alloc == rhs_alloc)) {
         T* data = lhs_copy.data;
         T* data_end = lhs_copy.end;

         // clean up old alloc
         _collections::backward_destroy(lhs_alloc, cloner, data, data_end);

         // assign before deallocation in case it fails
         lhs_raw = {};
         lhs_copy = {};

         // don't need to deallocate on backward_destroy failure, since lhs can
         // still access and reuse the allocation
         mem::Alloc<A>::dealloc( //
           RefMut<A>(lhs_alloc),
           static_cast<void*>(data),
           mem::Layout{ (lhs_copy.end_alloc - lhs_copy.data) * sizeof(T),
                        alignof(T) });
       }

       lhs_alloc.get() = rhs_alloc.get();

       if (lhs_raw.data == nullptr) {
         usize len = rhs_raw.end - rhs_raw.data;
         mem::AllocBlock blk =
           _collections::alloc_and_copy(lhs_alloc, cloner, rhs_raw.data, len);
         T* data = static_cast<T*>(blk.data);
         lhs_raw = {
           data,
           data + len,
           data + blk.byte_cap / sizeof(T),
         };
         return;
       }

       usize assign_len = _detail::min2(lhs_copy.len(), rhs_raw.len());
       // copy assign until the shared len
       _collections::slice_clone_from( //
         lhs_alloc,
         cloner,
         lhs_copy.data,
         lhs_copy.data + assign_len,
         rhs_raw.data);

       // destroy from the shared len until end of lhs
       lhs_raw.end = lhs_raw.data + assign_len;
       _collections::backward_destroy( //
         lhs_alloc,
         cloner,
         lhs_copy.data + assign_len,
         lhs_copy.end);

       // pass allocation to realloc_and_append
       lhs_raw = {};

       // realloc and copy construct new elements until end of rhs
       mem::AllocBlock block = _collections::realloc_and_append(
         lhs_alloc,
         cloner,
         mem::AllocBlock{
           lhs_copy.data,
           (lhs_copy.end_alloc - lhs_copy.data) * sizeof(T),
         },                         // out
         assign_len,                // out_len
         rhs_raw.data + assign_len, // in
         rhs_len - assign_len);     // in_len

       lhs_raw = vector::RawVector<T>{
         static_cast<T*>(block.data),
         static_cast<T*>(block.data) + rhs_len,
         static_cast<T*>(block.data) + block.byte_cap / sizeof(T),
       };
     }
   };

   template<>
   struct CloneFromImpl<true>
   {
     template<typename T, typename A, typename C>
     static void fn(RefMut<A> lhs_alloc,
                    RefMut<C> cloner,
                    vector::RawVector<T>& lhs_raw,
                    Ref<A> rhs_alloc,
                    vector::RawVector<T> const rhs_raw)
       VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_copy_assignable<A>) && //
                       VEG_CONCEPT(alloc::nothrow_dealloc<A>) &&
                       VEG_CONCEPT(alloc::nothrow_alloc<A>))
     {
       vector::RawVector<T> lhs_copy = lhs_raw;

       bool need_to_realloc = (!(lhs_alloc.get() == rhs_alloc.get()) ||
                               (lhs_copy.cap() < rhs_raw.len()));
       if (need_to_realloc) {
         T* data = lhs_copy.data;
         usize cap = lhs_copy.cap();

         // assign before deallocation in case it fails
         lhs_raw = {};
         mem::Alloc<A>::dealloc( //
           RefMut<A>(lhs_alloc),
           static_cast<void*>(data),
           mem::Layout{ cap * sizeof(T), alignof(T) });
         lhs_copy = {};
       }

       lhs_alloc.get() = rhs_alloc.get();

       // allocate and copy all elements
       if (need_to_realloc) {
         mem::AllocBlock block = _collections::alloc_and_copy( //
           lhs_alloc,
           cloner,
           rhs_raw.data,
           rhs_raw.len());
         lhs_raw.data = static_cast<T*>(block.data);
         lhs_raw.end_alloc = lhs_raw.data + block.byte_cap / sizeof(T);
       } else {
         _collections::slice_clone( //
           lhs_alloc,
           cloner,
           lhs_copy.data,
           lhs_copy.data + rhs_raw.len(),
           rhs_raw.data);
       }
       lhs_raw.end = lhs_raw.data + rhs_raw.len();
     }
   };

   template<typename T, typename A, typename C>
   VEG_INLINE void
   clone_from(RefMut<A> lhs_alloc,
              RefMut<C> cloner,
              vector::RawVector<T>& lhs_raw,
              Ref<A> rhs_alloc,
              vector::RawVector<T> const rhs_raw)
     VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_copy_assignable<A>) && //
                     VEG_CONCEPT(alloc::nothrow_alloc<A>) &&    //
                     VEG_CONCEPT(alloc::nothrow_clone<T, A, C>) &&
                     VEG_CONCEPT(alloc::nothrow_clone_from<T, A, C>))
   {
     _collections::CloneFromImpl<
       mem::Cloner<C>::template trivial_clone<T>::value>::fn(lhs_alloc,
                                                             cloner,
                                                             lhs_raw,
                                                             rhs_alloc,
                                                             rhs_raw);
   }
   } // namespace _collections
   } // namespace _detail

   namespace _detail {
   namespace _vector {
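   // move-only RAII wrapper around the (data, end, end_alloc) pointer triple:
   // moving steals the pointers and empties the source, and "copying" yields
   // an empty vector, which lets VecAlloc below default its special members.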
   template<typename T>
   struct RawVectorMoveRaii /* NOLINT */
   {
     vector::RawVector<T> _ = {};

     RawVectorMoveRaii() = default;
     RawVectorMoveRaii(FromRawParts /*tag*/, vector::RawVector<T> inner)
       VEG_NOEXCEPT : _{ inner } {};
     VEG_INLINE VEG_CPP14(constexpr)
       RawVectorMoveRaii(RawVectorMoveRaii&& rhs) VEG_NOEXCEPT : _{ rhs._ }
     {
       rhs._ = {};
     }
     VEG_INLINE VEG_CPP14(constexpr)
       RawVectorMoveRaii(RawVectorMoveRaii const& /*rhs*/) VEG_NOEXCEPT
     {
     }
   };

   template<typename T, typename A>
   struct VecAlloc : // alloc manager needs to be constructed first
                     Tuple<A, RawVectorMoveRaii<T>>
   {
     using Tuple<A, RawVectorMoveRaii<T>>::Tuple;

   public:
     VecAlloc(VecAlloc const&) = default;
     VecAlloc(VecAlloc&&) = default;
     auto operator=(VecAlloc const&) -> VecAlloc& = default;
     auto operator=(VecAlloc&&) -> VecAlloc& = default;

     VEG_INLINE ~VecAlloc()
       VEG_NOEXCEPT_IF(VEG_CONCEPT(alloc::nothrow_dealloc<A>))
     {
       vector::RawVector<T> raw = (*this)[1_c]._;
       if ((raw.data != nullptr) && (raw.end_alloc != 0)) {

         // FIXME: if asan is enabled, before sanitizing make sure that:
         //  - begin is 8 byte aligned
         //  - either:
         //    - end is 8 byte aligned
         //    - A is the SystemAlloc
   #if VEG_HAS_ASAN
         _detail::__sanitizer_annotate_contiguous_container( //
           raw.data, raw.end_alloc, raw.data, raw.end_alloc);
   #endif

         mem::Alloc<A>::dealloc(
           mut((*this)[0_c]),
           static_cast<void*>(raw.data),
           mem::Layout{ usize(raw.end_alloc - raw.data) * sizeof(T),
                        alignof(T) });
       }
     }
   };
   } // namespace _vector
   } // namespace _detail

   #if VEG_HAS_ASAN
   #define __VEG_ASAN_ANNOTATE() /* NOLINT */                                  \
     if (ptr() != nullptr) {                                                   \
       _detail::__sanitizer_annotate_contiguous_container(                     \
         ptr(), ptr() + capacity(), ptr() + len(), ptr() + capacity());        \
     }                                                                         \
     auto&& _veglib_asan = defer([&]() noexcept {                              \
       if (ptr() != nullptr) {                                                 \
         _detail::__sanitizer_annotate_contiguous_container(                   \
           ptr(), ptr() + capacity(), ptr() + capacity(), ptr() + len());      \
       }                                                                       \
     });                                                                       \
     (void)_veglib_asan
   #else
   #define __VEG_ASAN_ANNOTATE() /* NOLINT */ (void)0;
   #endif

   namespace _detail {
   namespace _collections {
   template<typename T,
            typename A,
            mem::DtorAvailable Dtor = mem::DtorAvailableFor<T>::value,
            mem::CopyAvailable Copy = mem::CopyAvailableFor<T>::value>
   struct VecImpl
   {
   private:
     _detail::_vector::VecAlloc<T, A> _;

   public:
     VEG_NODISCARD VEG_INLINE VEG_CPP14(constexpr) auto alloc_ref() const
       VEG_NOEXCEPT -> Ref<A>
     {
       return ref(_[0_c]);
     }
     VEG_NODISCARD VEG_INLINE VEG_CPP14(constexpr) auto raw_ref() const
       VEG_NOEXCEPT -> Ref<vector::RawVector<T>>
     {
       return ref(_[1_c]._);
     }
     VEG_NODISCARD VEG_INLINE VEG_CPP14(constexpr) auto alloc_mut(
       Unsafe /*tag*/) VEG_NOEXCEPT -> RefMut<A>
     {
       return mut(_[0_c]);
     }
     VEG_NODISCARD VEG_INLINE VEG_CPP14(constexpr) auto raw_mut(Unsafe /*tag*/)
       VEG_NOEXCEPT -> RefMut<vector::RawVector<T>>
     {
       return mut(_[1_c]._);
     }

   private:
     VEG_INLINE void _reserve_grow_exact_impl(Unsafe /*tag*/, usize new_cap)
       VEG_NOEXCEPT_IF(VEG_CONCEPT(alloc::nothrow_grow<A>))
     {
       __VEG_ASAN_ANNOTATE();

       vector::RawVector<T>& raw = this->raw_mut(unsafe).get();
       auto len = usize(this->len());

       mem::AllocBlock new_block = mem::Alloc<A>::grow(
         this->alloc_mut(unsafe),
         static_cast<void*>(raw.data),
         mem::Layout{
           usize(byte_capacity()),
           alignof(T),
         },
         new_cap * sizeof(T),
         mem::RelocFn{ collections::relocate_pointer<T>::value });
       T* data = static_cast<T*>(new_block.data);
       raw = {
         data,
         data + len,
         data + new_block.byte_cap / sizeof(T),
       };
     }
     VEG_INLINE void _reserve_grow_exact(Unsafe /*tag*/, isize new_cap)
       VEG_NOEXCEPT_IF(VEG_CONCEPT(alloc::nothrow_grow<A>))
     {
       isize old_len = len();
       this->_reserve_grow_exact_impl(unsafe, usize(new_cap));
       meta::unreachable_if(capacity() < new_cap);
       meta::unreachable_if(len() != old_len);
     }
     VEG_INLINE void _reserve_one_more(Unsafe /*tag*/)
     {
       this->_reserve_grow_exact(
         unsafe,
         1 + isize(_detail::_collections::vector_grow_compute(
               usize(capacity()))));
     }
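
     // growth policy: a full vector grows to 2 * capacity() + 1 slots on push
     // (vector_grow_compute above), keeping push() amortized O(1), while
     // reserve() rounds requests up to at least double the current capacity.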
     static_assert(VEG_CONCEPT(nothrow_move_assignable<A>), ".");
     static_assert(VEG_CONCEPT(nothrow_movable<A>), ".");

   public:
     VEG_INLINE ~VecImpl()
       VEG_NOEXCEPT_IF(Dtor == mem::DtorAvailable::yes_nothrow &&
                       VEG_CONCEPT(alloc::nothrow_dealloc<A>))
     {
       static_assert(Dtor == mem::DtorAvailableFor<T>::value, ".");
       vector::RawVector<T> raw = this->raw_ref().get();
       if (raw.data != nullptr) {
         this->clear();
       }
     }

     VEG_INLINE VecImpl() = default;
     VEG_INLINE VecImpl(Unsafe /*unsafe*/,
                        FromRawParts /*tag*/,
                        vector::RawVector<T> rawvec,
                        A alloc) VEG_NOEXCEPT
       : _{
           tuplify,
           VEG_FWD(alloc),
           _detail::_vector::RawVectorMoveRaii<T>{ from_raw_parts, rawvec },
         }
     {
     }

     VEG_INLINE VecImpl(VecImpl&&) = default;
     VEG_INLINE auto operator=(VecImpl&& rhs) -> VecImpl&
     {
       {
         auto cleanup = static_cast<VecImpl&&>(*this);
       }
       // can't fail
       this->alloc_mut(unsafe).get() =
         static_cast<A&&>(rhs.alloc_mut(unsafe).get());
       this->raw_mut(unsafe).get() = rhs.raw_ref().get();
       rhs.raw_mut(unsafe).get() = {};
       return *this;
     };

     explicit VecImpl(VecImpl const& rhs)
       VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_copyable<A>) && //
                       VEG_CONCEPT(alloc::nothrow_alloc<A>) &&
                       Copy == mem::CopyAvailable::yes_nothrow)
       : _{ rhs._ }
     {
       static_assert(Copy == mem::CopyAvailableFor<T>::value, ".");
       __VEG_ASAN_ANNOTATE();
       vector::RawVector<T> rhs_raw = rhs.raw_ref().get();
       mem::AllocBlock blk =
         _detail::_collections::alloc_and_copy(this->alloc_mut(unsafe),
                                               mut(mem::DefaultCloner{}),
                                               rhs_raw.data,
                                               usize(rhs.len()));
       T* data = static_cast<T*>(blk.data);
       this->raw_mut(unsafe).get() = vector::RawVector<T>{
         data,
         data + usize(rhs.len()),
         data + blk.byte_cap / sizeof(T),
       };
     }

     auto operator=(VecImpl const& rhs)
       VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_copy_assignable<A>) &&
                       VEG_CONCEPT(alloc::nothrow_alloc<A>) &&
                       Copy == mem::CopyAvailable::yes_nothrow) -> VecImpl&
     {
       static_assert(Copy == mem::CopyAvailableFor<T>::value, ".");
       if (this != mem::addressof(rhs)) {
         __VEG_ASAN_ANNOTATE();
         _detail::_collections::clone_from(this->alloc_mut(unsafe),
                                           mut(mem::DefaultCloner{}),
                                           this->raw_mut(unsafe).get(),
                                           rhs.alloc_ref(),
                                           rhs.raw_ref().get());
       }
       return *this;
     }

     VEG_INLINE void reserve_exact(isize new_cap)
       VEG_NOEXCEPT_IF(VEG_CONCEPT(alloc::nothrow_grow<A>))
     {
       if (new_cap > capacity()) {
         this->_reserve_grow_exact(unsafe, new_cap);
       }
     }
     VEG_INLINE void reserve(isize new_cap)
       VEG_NOEXCEPT_IF(VEG_CONCEPT(alloc::nothrow_grow<A>))
     {
       auto cap = capacity();
       if (new_cap > cap) {
         this->reserve_exact(isize(_detail::_collections::vector_grow_choose(
           usize(cap), usize(new_cap))));
       }
     }

     VEG_INLINE void pop_several_unchecked(Unsafe unsafe, isize n)
       VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_destructible<T>))
     {
       VEG_DEBUG_ASSERT_ALL_OF(0 <= n, n <= len());
       __VEG_ASAN_ANNOTATE();
       vector::RawVector<T>& raw = this->raw_mut(unsafe).get();
       T* end = raw.end;
       raw.end -= n;
       _detail::_collections::backward_destroy(
         this->alloc_mut(unsafe), mut(mem::DefaultCloner{}), end - n, end);
     }

     VEG_INLINE void pop_several(isize n)
       VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_destructible<T>))
     {
       VEG_ASSERT_ALL_OF(0 <= n, n <= len());
       pop_several_unchecked(unsafe, n);
     }

     VEG_INLINE auto pop_unchecked(Unsafe /*unsafe*/)
       VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_movable<T>)) -> T
     {
       VEG_DEBUG_ASSERT(1 <= len());
       T* last = raw_ref().get().end - 1;
       T t = static_cast<T&&>(*last);
       --raw_mut(unsafe).get().end;
       mem::destroy_at(last);
       return t;
     }
     VEG_INLINE auto pop_mid_unchecked(Unsafe /*unsafe*/, isize i)
       VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_movable<T>)) -> T
     {
       VEG_DEBUG_ASSERT(0 <= i);
       VEG_DEBUG_ASSERT(i < len());
       T* elem = raw_ref().get().data + i;
       T t = static_cast<T&&>(*elem);

       // this block does not throw
       {
         mem::destroy_at(elem);
         _detail::_collections::relocate<T>( //
           elem, elem + 1, sizeof(T) * usize(len() - i - 1));
       }
       --raw_mut(unsafe).get().end;
       return t;
     }
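
     // pop() / pop_mid() are the checked variants of the unchecked operations
     // above; pop_mid relocates the tail [i + 1, len) one slot to the left,
     // so removing from the middle costs O(len - i) element moves.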
     VEG_INLINE auto pop() VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_movable<T>)) -> T
     {
       VEG_ASSERT(1 <= len());
       return pop_unchecked(unsafe);
     }
     VEG_INLINE auto pop_mid(isize i)
       VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_movable<T>)) -> T
     {
       VEG_ASSERT(0 <= i);
       VEG_ASSERT(i < len());
       return pop_mid_unchecked(unsafe, i);
     }

     VEG_INLINE void clear()
       VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_destructible<T>))
     {
       pop_several_unchecked(unsafe, len());
     }

     VEG_TEMPLATE(typename U = T,
                  requires(VEG_CONCEPT(constructible<U>)),
                  void resize,
                  (n, isize))
     VEG_NOEXCEPT_IF(VEG_CONCEPT(alloc::nothrow_grow<A>) &&
                     VEG_CONCEPT(nothrow_constructible<U>))
     {
       vector::RawVector<T>& raw = raw_mut(unsafe).get();
       if (n > len()) {
         reserve(n);
         {
           __VEG_ASAN_ANNOTATE();
           ::new (static_cast<void*>(ptr_mut() + len())) T[usize(n - len())]{};
           raw.end = raw.data + n;
         }
       } else {
         pop_several_unchecked(unsafe, len() - n);
       }
     }

     VEG_TEMPLATE(typename U = T,
                  requires(VEG_CONCEPT(constructible<U>)),
                  void resize_for_overwrite,
                  (n, isize))
     VEG_NOEXCEPT_IF(VEG_CONCEPT(alloc::nothrow_grow<A>) &&
                     VEG_CONCEPT(nothrow_constructible<U>))
     {
       vector::RawVector<T>& raw = raw_mut(unsafe).get();
       if (n > len()) {
         reserve(n);
         {
           __VEG_ASAN_ANNOTATE();
           ::new (static_cast<void*>(ptr_mut() + len())) T[usize(n - len())];
           raw.end = raw.data + n;
         }
       } else {
         pop_several_unchecked(unsafe, len() - n);
       }
     }

     VEG_TEMPLATE(typename Fn,
                  requires(VEG_CONCEPT(fn_once<Fn, T>)),
                  VEG_INLINE void push_mid_with,
                  (fn, Fn),
                  (i, isize))
     VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_fn_once<Fn, T>) &&
                     VEG_CONCEPT(alloc::nothrow_alloc<A>))
     {
       static_assert(VEG_CONCEPT(nothrow_fn_once<Fn, T>), ".");
       VEG_ASSERT_ALL_OF(0 <= i, i <= len());
       reserve(len() + 1);
       {
         __VEG_ASAN_ANNOTATE();

         vector::RawVector<T>& raw = this->raw_mut(unsafe).get();
         T* elem = raw.data + i;
         _detail::_collections::relocate_backward<T>( //
           elem + 1, elem, sizeof(T) * usize(raw.end - elem));
         mem::construct_with(elem, VEG_FWD(fn));
         ++raw.end;
       }
     }

     VEG_INLINE void push_mid(T value, isize i)
       VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_movable<T>) &&
                       VEG_CONCEPT(alloc::nothrow_alloc<A>))
     {
       this->push_mid_with(_detail::MoveFn<T>{ VEG_FWD(value) }, i);
     }

     VEG_TEMPLATE(typename Fn,
                  requires(VEG_CONCEPT(fn_once<Fn, T>)),
                  VEG_INLINE void push_with_unchecked,
                  (/*tag*/, Unsafe),
                  (fn, Fn))
     VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_fn_once<Fn, T>) &&
                     VEG_CONCEPT(alloc::nothrow_alloc<A>))
     {
       __VEG_ASAN_ANNOTATE();

       vector::RawVector<T>& raw = this->raw_mut(unsafe).get();
       mem::construct_with(raw.end, VEG_FWD(fn));
       ++raw.end;
     }

     VEG_TEMPLATE(typename Fn,
                  requires(VEG_CONCEPT(fn_once<Fn, T>)),
                  VEG_INLINE void push_with,
                  (fn, Fn))
     VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_fn_once<Fn, T>) &&
                     VEG_CONCEPT(alloc::nothrow_alloc<A>))
     {
       vector::RawVector<T> raw = this->raw_ref().get();
       if (HEDLEY_UNLIKELY(raw.end == raw.end_alloc)) {
         this->_reserve_one_more(unsafe);
       }
       this->push_with_unchecked(unsafe, VEG_FWD(fn));
     }

     VEG_INLINE void push(T value)
       VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_movable<T>) &&
                       VEG_CONCEPT(alloc::nothrow_alloc<A>))
     {
       this->push_with(_detail::MoveFn<T>{ VEG_FWD(value) });
     }
     VEG_INLINE void push_unchecked(Unsafe /*tag*/, T value)
       VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_movable<T>))
     {
       this->push_with_unchecked(unsafe, _detail::MoveFn<T>{ VEG_FWD(value) });
     }

     VEG_NODISCARD VEG_INLINE auto as_ref() const VEG_NOEXCEPT -> Slice<T>
     {
       return {
         unsafe,
         from_raw_parts,
         ptr(),
         len(),
       };
     }
     VEG_NODISCARD VEG_INLINE auto as_mut() VEG_NOEXCEPT -> SliceMut<T>
     {
       return {
         unsafe,
         from_raw_parts,
         ptr_mut(),
         len(),
       };
     }

     VEG_NODISCARD VEG_INLINE auto ptr() const VEG_NOEXCEPT -> T const*
     {
       return this->raw_ref().get().data;
     }
     VEG_NODISCARD VEG_INLINE auto ptr_mut() VEG_NOEXCEPT -> T*
     {
       return const_cast<T*>(this->ptr());
     }
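
     // size accessors: len() and capacity() are derived from the raw pointer
     // triple; byte_capacity() uses byte-level pointer arithmetic at run time
     // and element-count arithmetic during constant evaluation.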
     VEG_NODISCARD VEG_INLINE auto len() const VEG_NOEXCEPT -> isize
     {
       auto& raw = this->raw_ref().get();
       return isize(raw.end - raw.data);
     }
     VEG_NODISCARD VEG_INLINE auto capacity() const VEG_NOEXCEPT -> isize
     {
       auto& raw = this->raw_ref().get();
       return isize(raw.end_alloc - raw.data);
     }
     VEG_NODISCARD VEG_INLINE auto byte_capacity() const VEG_NOEXCEPT -> isize
     {
       auto& raw = this->raw_ref().get();
       return meta::is_consteval()
                ? (raw.end_alloc - raw.data) * isize(sizeof(T))
                : (reinterpret_cast<char const*>(raw.end_alloc) -
                   reinterpret_cast<char const*>(raw.data));
     }

     VEG_NODISCARD VEG_INLINE auto operator[](isize i) const VEG_NOEXCEPT
       -> T const&
     {
       VEG_ASSERT(usize(i) < usize(len()));
       return this->ptr()[i];
     }
     VEG_NODISCARD VEG_INLINE auto operator[](isize i) VEG_NOEXCEPT -> T&
     {
       return const_cast<T&>(
         static_cast<VecImpl const*>(this)->operator[](i));
     }
   };
   } // namespace _collections
   } // namespace _detail

   template<typename T,
            typename A = mem::SystemAlloc,
            mem::DtorAvailable Dtor = mem::DtorAvailableFor<T>::value,
            mem::CopyAvailable Copy = mem::CopyAvailableFor<T>::value>
   struct Vec
     : private _detail::_vector::adl::AdlBase
     , private meta::if_t< //
         Copy == mem::CopyAvailable::no,
         _detail::NoCopy,
         _detail::Empty>
     , public _detail::_collections::VecImpl<T, A, Dtor, Copy>
   {

     using _detail::_collections::VecImpl<T, A, Dtor, Copy>::VecImpl;
     Vec() = default;

     VEG_EXPLICIT_COPY(Vec);
   };

   template<typename T, typename A>
   struct cpo::is_trivially_relocatable<Vec<T, A>>
     : cpo::is_trivially_relocatable<A>
   {};
   template<typename T, typename A>
   struct cpo::is_trivially_constructible<Vec<T, A>>
     : cpo::is_trivially_constructible<A>
   {};
   } // namespace veg
   } // namespace linalg
   } // namespace proxsuite

   #undef __VEG_ASAN_ANNOTATE

   #include "proxsuite/linalg/veg/internal/epilogue.hpp"
   #endif /* end of include guard VEG_VECTOR_HPP_QWFSH3ROS */
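
A minimal usage sketch (not part of the header): ``veg::Vec`` exposes ``push``/``pop``/``operator[]`` with signed ``isize`` indices, and copies must be requested explicitly since the copy constructor is declared with ``VEG_EXPLICIT_COPY``.

.. code-block:: cpp

   #include <proxsuite/linalg/veg/vec.hpp>

   namespace veg = proxsuite::linalg::veg;

   int main() {
     veg::Vec<int> v;
     v.reserve(4);       // grow capacity ahead of time (optional)
     v.push(1);          // amortized O(1) append
     v.push(2);
     v.push(3);

     int last = v.pop(); // removes and returns 3
     int first = v[0];   // bounds-checked access, asserts on misuse

     veg::Vec<int> w{ v }; // explicit copy; `w = v;` copy-assigns
     return (last == 3 && first == 1 && w.len() == v.len()) ? 0 : 1;
   }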