diff --git a/changelog.d/20260419_182500_issue312_pptr_access_modes.md b/changelog.d/20260419_182500_issue312_pptr_access_modes.md new file mode 100644 index 00000000..a30526ba --- /dev/null +++ b/changelog.d/20260419_182500_issue312_pptr_access_modes.md @@ -0,0 +1,6 @@ +--- +bump: minor +--- + +### Added +- Added explicit checked and unchecked `pptr` resolution APIs so stale persistent pointers can be detected without removing internal raw access. diff --git a/include/pmm/forest_domain_mixin.inc b/include/pmm/forest_domain_mixin.inc index c9df22eb..cf1d1e23 100644 --- a/include/pmm/forest_domain_mixin.inc +++ b/include/pmm/forest_domain_mixin.inc @@ -67,8 +67,8 @@ static forest_domain* find_domain_by_symbol_unlocked( pptr symbol ) { if ( symbol.is_null() ) return nullptr; - pstringview* sym = resolve( symbol ); - if ( sym == nullptr ) + const char* sym_str = pstringview_c_str_unlocked( symbol ); + if ( sym_str == nullptr ) return nullptr; forest_registry* reg = forest_registry_root_unlocked(); if ( reg == nullptr ) @@ -76,7 +76,7 @@ static forest_domain* find_domain_by_symbol_unlocked( pptr symbol ) for ( std::uint16_t i = 0; i < reg->domain_count; ++i ) { if ( reg->domains[i].symbol_offset == symbol.offset() || - std::strncmp( reg->domains[i].name, sym->c_str(), detail::kForestDomainNameCapacity ) == 0 ) + std::strncmp( reg->domains[i].name, sym_str, detail::kForestDomainNameCapacity ) == 0 ) { reg->domains[i].symbol_offset = symbol.offset(); return ®->domains[i]; @@ -194,10 +194,10 @@ static pptr intern_symbol_unlocked( const char* s ) noexcept symbol_domain->root_offset, [&]( pptr cur ) -> int { - pstringview* obj = resolve( cur ); - return ( obj != nullptr ) ? std::strcmp( s, obj->c_str() ) : 0; + const char* cur_str = pstringview_c_str_unlocked( cur ); + return ( cur_str != nullptr ) ? 
std::strcmp( s, cur_str ) : 0; }, - []( pptr p ) -> pstringview* { return resolve( p ); } ); + []( pptr p ) -> const char* { return pstringview_c_str_unlocked( p ); } ); if ( !found.is_null() ) return found; @@ -207,27 +207,32 @@ static pptr intern_symbol_unlocked( const char* s ) noexcept if ( raw == nullptr ) return pptr(); - std::uint8_t* base = _backend.base_ptr(); - pptr new_node( detail::ptr_to_granule_idx( base, raw ) ); + pptr new_node = make_pptr_from_raw( raw ); + void* public_raw = raw_user_ptr_from_pptr( new_node ); + if ( public_raw == nullptr ) + { + deallocate_unlocked( raw ); + return pptr(); + } // Use memcpy to avoid UB on potentially misaligned raw pointer (ASan/UBSan fix). - std::memcpy( raw, &len, sizeof( len ) ); - char* str_dst = static_cast( raw ) + offsetof( pstringview, str ); + std::memcpy( public_raw, &len, sizeof( len ) ); + char* str_dst = static_cast( public_raw ) + offsetof( pstringview, str ); std::memcpy( str_dst, s, static_cast( len ) + 1 ); detail::avl_init_node( new_node ); - if ( !lock_block_permanent_unlocked( raw ) ) + if ( !lock_block_permanent_unlocked( public_raw ) ) return pptr(); // Re-derive c_str() pointer for comparisons using offset-based access. 
- const char* new_str = static_cast( raw ) + offsetof( pstringview, str ); + const char* new_str = static_cast( public_raw ) + offsetof( pstringview, str ); detail::avl_insert( new_node, symbol_domain->root_offset, [&]( pptr cur ) -> bool { - pstringview* cur_obj = resolve( cur ); - return ( cur_obj != nullptr ) && ( std::strcmp( new_str, cur_obj->c_str() ) < 0 ); + const char* cur_str = pstringview_c_str_unlocked( cur ); + return ( cur_str != nullptr ) && ( std::strcmp( new_str, cur_str ) < 0 ); }, - []( pptr p ) -> pstringview* { return resolve( p ); } ); + []( pptr p ) -> const char* { return pstringview_c_str_unlocked( p ); } ); return new_node; } @@ -496,3 +501,156 @@ static void for_each_free_block_inorder( const std::uint8_t* base, const detail: // Visit right subtree (larger blocks) for_each_free_block_inorder( base, hdr, right_off, depth + 1, callback ); } + +/// @brief Find the mutable block header for a user-data pointer (or nullptr). +static pmm::Block* find_block_from_user_ptr( void* ptr ) noexcept +{ + std::uint8_t* base = _backend.base_ptr(); + detail::ManagerHeader* hdr = get_header( base ); + if constexpr ( sizeof( Block ) % address_traits::granule_size != 0 ) + { + constexpr std::size_t rounded_header_size = + static_cast( kBlockHdrGranules ) * address_traits::granule_size; + if ( ptr != nullptr && base != nullptr ) + { + auto* raw = static_cast( ptr ); + if ( raw >= base + rounded_header_size && raw < base + static_cast( hdr->total_size ) ) + { + std::uint8_t* cand = raw - rounded_header_size; + if ( ( static_cast( cand - base ) % address_traits::granule_size ) == 0 && + cand + sizeof( Block ) <= base + static_cast( hdr->total_size ) && + BlockStateBase::get_weight( cand ) != 0 ) + return reinterpret_cast*>( cand ); + } + } + } + return detail::header_from_ptr_t( base, ptr, static_cast( hdr->total_size ) ); +} + +/// @brief Find the const block header for a user-data pointer. 
+/// Returns nullptr if ptr is out of range or the block header is invalid. +static const pmm::Block* find_block_from_user_ptr( const void* ptr ) noexcept +{ + const std::uint8_t* base = _backend.base_ptr(); + const auto* hdr = get_header_c( base ); + if constexpr ( sizeof( Block ) % address_traits::granule_size != 0 ) + { + constexpr std::size_t rounded_header_size = + static_cast( kBlockHdrGranules ) * address_traits::granule_size; + if ( ptr != nullptr && base != nullptr ) + { + const auto* raw = static_cast( ptr ); + if ( raw >= base + rounded_header_size && raw < base + static_cast( hdr->total_size ) ) + { + const std::uint8_t* cand = raw - rounded_header_size; + if ( ( static_cast( cand - base ) % address_traits::granule_size ) == 0 && + cand + sizeof( Block ) <= base + static_cast( hdr->total_size ) && + BlockStateBase::get_weight( cand ) != 0 ) + return reinterpret_cast*>( cand ); + } + } + } + return detail::header_from_ptr_t( const_cast( base ), const_cast( ptr ), + static_cast( hdr->total_size ) ); +} + +// ─── raw ↔ pptr helpers ─────────────────────────────────────── + +/// @brief Convert a raw user-data pointer returned by allocate() into a canonical public pptr. +/// Caller must ensure raw != nullptr and _initialized before calling. +/// Returns null pptr if the pointer is not within the managed region. 
+template static pptr make_pptr_from_raw( void* raw ) noexcept +{ + if ( raw == nullptr || !_initialized ) + return pptr(); + std::uint8_t* base = _backend.base_ptr(); + auto* raw_byte = static_cast( raw ); + if ( base == nullptr || raw_byte < base || raw_byte >= base + _backend.total_size() ) + return pptr(); + pmm::Block* blk = find_block_from_user_ptr( raw ); + if ( blk == nullptr ) + return pptr(); + index_type blk_idx = detail::block_idx_t( base, blk ); + if ( blk_idx > std::numeric_limits::max() - kBlockHdrGranules ) + return pptr(); + return pptr( static_cast( blk_idx + kBlockHdrGranules ) ); +} + +// ─── blk_raw helpers ────────────────────────────────────────── +// base + (offset - kBlockHdrGranules) * granule_size → block header before public user data. + +/// @brief Return a const pointer to the block header for the given pptr. +/// Returns nullptr if offset is invalid (would place block header before base). +template static const void* block_raw_ptr_from_pptr( pptr p ) noexcept +{ + const std::uint8_t* base = _backend.base_ptr(); + if ( p.offset() < kBlockHdrGranules ) + return nullptr; + std::size_t blk_off = static_cast( p.offset() - kBlockHdrGranules ) * address_traits::granule_size; + if ( blk_off + sizeof( Block ) > _backend.total_size() ) + return nullptr; + return base + blk_off; +} + +/// @brief Return a mutable pointer to the block header for the given pptr. +/// Returns nullptr if offset is invalid (would place block header before base). 
+template static void* block_raw_mut_ptr_from_pptr( pptr p ) noexcept +{ + std::uint8_t* base = _backend.base_ptr(); + if ( p.offset() < kBlockHdrGranules ) + return nullptr; + std::size_t blk_off = static_cast( p.offset() - kBlockHdrGranules ) * address_traits::granule_size; + if ( blk_off + sizeof( Block ) > _backend.total_size() ) + return nullptr; + return base + blk_off; +} + +template static constexpr index_type block_idx_from_pptr( pptr p ) noexcept +{ + constexpr index_type kHdrGranules = static_cast( + ( sizeof( Block ) + address_traits::granule_size - 1 ) / address_traits::granule_size ); + return static_cast( p.offset() - kHdrGranules ); +} + +template static void* raw_user_ptr_from_pptr( pptr p ) noexcept +{ + if ( p.is_null() || !_initialized ) + return nullptr; + + std::uint8_t* base = _backend.base_ptr(); + std::size_t byte_off = static_cast( p.offset() ) * address_traits::granule_size; + if ( byte_off + sizeof( T ) > _backend.total_size() ) + return nullptr; + return base + byte_off; +} + +template static void* raw_block_user_ptr_from_pptr( pptr p ) noexcept +{ + if ( p.is_null() || !_initialized ) + return nullptr; + + std::uint8_t* base = _backend.base_ptr(); + if constexpr ( sizeof( Block ) % address_traits::granule_size == 0 ) + { + return raw_user_ptr_from_pptr( p ); + } + else + { + constexpr index_type kHdrGranules = static_cast( + ( sizeof( Block ) + address_traits::granule_size - 1 ) / address_traits::granule_size ); + if ( p.offset() < kHdrGranules ) + return nullptr; + std::size_t blk_off = static_cast( p.offset() - kHdrGranules ) * address_traits::granule_size; + if ( blk_off + sizeof( Block ) > _backend.total_size() ) + return nullptr; + return base + blk_off + sizeof( Block ); + } +} + +static const char* pstringview_c_str_unlocked( pptr p ) noexcept +{ + const void* raw = raw_user_ptr_from_pptr( p ); + if ( raw == nullptr ) + return nullptr; + return static_cast( raw ) + offsetof( pstringview, str ); +} diff --git 
a/include/pmm/persist_memory_manager.h b/include/pmm/persist_memory_manager.h index 5149ed2b..68e7c1c1 100644 --- a/include/pmm/persist_memory_manager.h +++ b/include/pmm/persist_memory_manager.h @@ -522,8 +522,7 @@ template cla { if ( p.is_null() || !_initialized ) return; - std::uint8_t* base = _backend.base_ptr(); - void* raw = base + static_cast( p.offset() ) * address_traits::granule_size; + void* raw = raw_block_user_ptr_from_pptr( p ); deallocate( raw ); } @@ -556,14 +555,11 @@ template cla _last_error = PmmError::NotInitialized; return pptr(); } - std::uint8_t* base = _backend.base_ptr(); - detail::ManagerHeader* hdr = get_header( base ); - // blk_idx = pptr.offset - floor(sizeof(Block) / granule) - static constexpr index_type kBlkHdrFloorGran = - static_cast( sizeof( Block ) / address_traits::granule_size ); - index_type blk_idx = static_cast( p.offset() - kBlkHdrFloorGran ); - void* blk_raw = detail::block_at( base, blk_idx ); - index_type old_data_gran = BlockStateBase::get_weight( blk_raw ); + std::uint8_t* base = _backend.base_ptr(); + detail::ManagerHeader* hdr = get_header( base ); + index_type blk_idx = block_idx_from_pptr( p ); + void* blk_raw = detail::block_at( base, blk_idx ); + index_type old_data_gran = BlockStateBase::get_weight( blk_raw ); index_type new_data_gran = detail::bytes_to_granules_t( new_user_size ); if ( new_data_gran == 0 ) new_data_gran = 1; @@ -593,8 +589,6 @@ template cla } } // Fallback: allocate new + memmove + free old (under same lock). 
- static constexpr index_type kBlkHdrFloorGranFb = - static_cast( sizeof( Block ) / address_traits::granule_size ); index_type new_data_gran_alloc = detail::bytes_to_granules_t( new_user_size ); if ( new_data_gran_alloc == 0 ) new_data_gran_alloc = 1; @@ -629,13 +623,18 @@ template cla _last_error = PmmError::OutOfMemory; return pptr(); } - pptr new_p = make_pptr_from_raw( new_raw ); - void* new_dst = base + static_cast( new_p.offset() ) * address_traits::granule_size; - void* old_src = base + static_cast( p.offset() ) * address_traits::granule_size; + pptr new_p = make_pptr_from_raw( new_raw ); + if ( new_p.is_null() ) + { + _last_error = PmmError::InvalidPointer; + return pptr(); + } + void* new_dst = resolve_unchecked( new_p ); + void* old_src = resolve_unchecked( p ); std::size_t copy_sz = ( new_count < old_count ? new_count : old_count ) * sizeof( T ); std::memmove( new_dst, old_src, copy_sz ); // Free old block - index_type old_blk_idx = static_cast( p.offset() - kBlkHdrFloorGranFb ); + index_type old_blk_idx = block_idx_from_pptr( p ); void* old_blk_raw = detail::block_at( base, old_blk_idx ); index_type freed_w = BlockStateBase::get_weight( old_blk_raw ); if ( BlockStateBase::get_node_type( old_blk_raw ) != pmm::kNodeReadOnly ) @@ -685,8 +684,15 @@ template cla void* raw = allocate( sizeof( T ) ); if ( raw == nullptr ) return pptr(); - ::new ( raw ) T( static_cast( args )... ); - return make_pptr_from_raw( raw ); + pptr p = make_pptr_from_raw( raw ); + T* obj = resolve_unchecked( p ); + if ( obj == nullptr ) + { + deallocate( raw ); + return pptr(); + } + ::new ( obj ) T( static_cast( args )... 
); + return p; } /** @@ -708,9 +714,11 @@ template cla if ( p.is_null() || !_initialized ) return; - std::uint8_t* base = _backend.base_ptr(); - void* raw = base + static_cast( p.offset() ) * address_traits::granule_size; - reinterpret_cast( raw )->~T(); + T* obj = resolve_unchecked( p ); + void* raw = raw_block_user_ptr_from_pptr( p ); + if ( obj == nullptr || raw == nullptr ) + return; + obj->~T(); deallocate( raw ); } @@ -723,29 +731,71 @@ template cla } /** - * @brief Разыменовать pptr — получить сырой указатель T*. + * @brief Быстро разыменовать pptr без проверки состояния блока. * - * Этот статический метод вызывается из `pptr::resolve()`. + * Проверяет только null, инициализацию менеджера и границы буфера. Не проверяет, + * что pptr указывает на текущий выделенный блок. * * @tparam T Тип данных. * @param p Персистентный указатель. - * @return T* — указатель на данные или nullptr при ошибке. + * @return T* — указатель на данные или nullptr при грубой ошибке адреса. */ - template static T* resolve( pptr p ) noexcept + template static T* resolve_unchecked( pptr p ) noexcept { if ( p.is_null() || !_initialized ) return nullptr; - std::uint8_t* base = _backend.base_ptr(); - std::size_t byte_off = static_cast( p.offset() ) * address_traits::granule_size; - // Safety: reject out-of-bounds offsets instead of UB. - if ( byte_off + sizeof( T ) > _backend.total_size() ) + void* raw = raw_user_ptr_from_pptr( p ); + if ( raw == nullptr ) { _last_error = PmmError::InvalidPointer; return nullptr; } - return reinterpret_cast( base + byte_off ); + _last_error = PmmError::Ok; + return reinterpret_cast( raw ); } + /** + * @brief Разыменовать pptr с публичной проверкой live allocated block. + * + * Этот путь проверяет не только границы буфера, но и заголовок блока: + * pptr должен указывать на текущий занятый блок. Stale pptr после + * deallocate_typed() возвращает nullptr. + * + * @tparam T Тип данных. + * @param p Персистентный указатель. 
+ * @return T* — указатель на данные или nullptr при ошибке. + */ + template static T* resolve_checked( pptr p ) noexcept + { + T* raw = resolve_unchecked( p ); + const void* user_raw = raw; + if ( user_raw == nullptr ) + return nullptr; + const void* blk_raw = find_block_from_user_ptr( user_raw ); + if ( blk_raw == nullptr ) + { + _last_error = PmmError::InvalidPointer; + return nullptr; + } + if ( BlockStateBase::get_weight( blk_raw ) == 0 || + BlockStateBase::get_root_offset( blk_raw ) == 0 ) + { + _last_error = PmmError::InvalidPointer; + return nullptr; + } + _last_error = PmmError::Ok; + return raw; + } + + /** + * @brief Совместимый публичный checked access. + * + * Старое имя сохранено как alias, но его семантика теперь совпадает с + * resolve_checked(). Внутренний код, которому нужен только offset->address, + * должен явно использовать resolve_unchecked(). + */ + template static T* resolve( pptr p ) noexcept { return resolve_checked( p ); } + /** * @brief Разыменовать pptr и получить указатель на i-й элемент массива. * @@ -756,7 +806,7 @@ template cla */ template static T* resolve_at( pptr p, std::size_t i ) noexcept { - T* base_elem = resolve( p ); + T* base_elem = resolve_checked( p ); return ( base_elem == nullptr ) ? nullptr : base_elem + i; } @@ -792,13 +842,7 @@ template cla * @param p Персистентный указатель. * @return true если pptr валиден (в пределах кучи), false если null или вне границ. 
*/ - template static bool is_valid_ptr( pptr p ) noexcept - { - if ( p.is_null() || !_initialized ) - return false; - std::size_t byte_off = static_cast( p.offset() ) * address_traits::granule_size; - return byte_off + sizeof( T ) <= _backend.total_size(); - } + template static bool is_valid_ptr( pptr p ) noexcept { return resolve_checked( p ) != nullptr; } // ─── Root object API ────────────────────────────── @@ -1359,8 +1403,15 @@ template cla void* raw = allocate_unlocked( sizeof( T ) ); if ( raw == nullptr ) return pptr(); - ::new ( raw ) T( static_cast( args )... ); - return make_pptr_from_raw( raw ); + pptr p = make_pptr_from_raw( raw ); + T* obj = resolve_unchecked( p ); + if ( obj == nullptr ) + { + deallocate_unlocked( raw ); + return pptr(); + } + ::new ( obj ) T( static_cast( args )... ); + return p; } // Forest/domain registry private methods — extracted to forest_domain_mixin.inc @@ -1369,73 +1420,6 @@ template cla // Verify/repair methods — extracted to verify_repair_mixin.inc. #include "pmm/verify_repair_mixin.inc" - /// @brief Find the mutable block header for a user-data pointer (or nullptr). - static pmm::Block* find_block_from_user_ptr( void* ptr ) noexcept - { - std::uint8_t* base = _backend.base_ptr(); - detail::ManagerHeader* hdr = get_header( base ); - return detail::header_from_ptr_t( base, ptr, static_cast( hdr->total_size ) ); - } - - /// @brief Find the const block header for a user-data pointer. - /// Returns nullptr if ptr is out of range or the block header is invalid. - static const pmm::Block* find_block_from_user_ptr( const void* ptr ) noexcept - { - const std::uint8_t* base = _backend.base_ptr(); - return detail::header_from_ptr_t( - const_cast( base ), const_cast( ptr ), - static_cast( get_header_c( base )->total_size ) ); - } - - // ─── raw ↔ pptr helpers ─────────────────────────────────────── - - /// @brief Convert a raw user-data pointer returned by allocate() into a pptr. 
- /// Caller must ensure raw != nullptr and _initialized before calling. - /// Returns null pptr if the pointer is not within the managed region. - template static pptr make_pptr_from_raw( void* raw ) noexcept - { - std::uint8_t* base = _backend.base_ptr(); - auto* raw_byte = static_cast( raw ); - if ( raw_byte < base || raw_byte >= base + _backend.total_size() ) - return pptr(); - std::size_t byte_off = static_cast( raw_byte - base ); - std::size_t idx = byte_off / address_traits::granule_size; - if ( idx > static_cast( std::numeric_limits::max() ) ) - return pptr(); - return pptr( static_cast( idx ) ); - } - - // ─── blk_raw helpers ────────────────────────────────────────── - // base + offset * granule_size - sizeof(Block) → block header before user data. - - /// @brief Return a const pointer to the block header for the given pptr. - /// Returns nullptr if offset is invalid (would place block header before base). - template static const void* block_raw_ptr_from_pptr( pptr p ) noexcept - { - const std::uint8_t* base = _backend.base_ptr(); - std::size_t byte_off = static_cast( p.offset() ) * address_traits::granule_size; - if ( byte_off < sizeof( Block ) ) - return nullptr; - std::size_t blk_off = byte_off - sizeof( Block ); - if ( blk_off + sizeof( Block ) > _backend.total_size() ) - return nullptr; - return base + blk_off; - } - - /// @brief Return a mutable pointer to the block header for the given pptr. - /// Returns nullptr if offset is invalid (would place block header before base). 
- template static void* block_raw_mut_ptr_from_pptr( pptr p ) noexcept - { - std::uint8_t* base = _backend.base_ptr(); - std::size_t byte_off = static_cast( p.offset() ) * address_traits::granule_size; - if ( byte_off < sizeof( Block ) ) - return nullptr; - std::size_t blk_off = byte_off - sizeof( Block ); - if ( blk_off + sizeof( Block ) > _backend.total_size() ) - return nullptr; - return base + blk_off; - } - // ─── Address-traits-specific layout constants ────────────────── // These compute the correct granule indices based on the actual address_traits // granule size, rather than using the hardcoded DefaultAddressTraits constants. diff --git a/include/pmm/pptr.h b/include/pmm/pptr.h index 29faef3c..14441a0d 100644 --- a/include/pmm/pptr.h +++ b/include/pmm/pptr.h @@ -169,27 +169,27 @@ class pptr /** * @brief Разыменование указателя (статическая модель). * - * Вызывает `ManagerT::resolve(*this)` без аргументов. + * Вызывает `ManagerT::resolve_checked(*this)` без аргументов. * Доступно только для менеджеров со статическим API (например, PersistMemoryManager). * * @return T& — ссылка на данные. */ - T& operator*() const noexcept { return *ManagerT::template resolve( *this ); } + T& operator*() const noexcept { return *ManagerT::template resolve_checked( *this ); } /** * @brief Доступ к членам через персистентный указатель (статическая модель). * - * Вызывает `ManagerT::resolve(*this)` без аргументов. + * Вызывает `ManagerT::resolve_checked(*this)` без аргументов. * Доступно только для менеджеров со статическим API. * * @return T* — указатель на данные. */ - T* operator->() const noexcept { return ManagerT::template resolve( *this ); } + T* operator->() const noexcept { return ManagerT::template resolve_checked( *this ); } /** * @brief Получить сырой указатель (низкоуровневый доступ). * - * Вызывает `ManagerT::resolve(*this)`. + * Вызывает `ManagerT::resolve_checked(*this)`. * Используйте `*p` или `p->field` вместо этого метода для обычных операций. 
* Для доступа к элементам массива используйте `ManagerT::resolve_at(p, i)`. * @@ -198,7 +198,16 @@ class pptr * * @return T* — указатель на данные или nullptr если is_null(). */ - T* resolve() const noexcept { return ManagerT::template resolve( *this ); } + T* resolve() const noexcept { return ManagerT::template resolve_checked( *this ); } + + /** + * @brief Получить сырой указатель через unchecked manager path. + * + * Проверяет только грубую адресуемость pptr. Не проверяет, что блок сейчас + * выделен. Предназначено для внутреннего кода менеджера и низкоуровневой + * диагностики, где stale/free-block access выбран явно. + */ + T* resolve_unchecked() const noexcept { return ManagerT::template resolve_unchecked( *this ); } // ─── Доступ к узлу AVL-дерева ──────────────────────────────── diff --git a/include/pmm/pstringview.h b/include/pmm/pstringview.h index fb62c686..e5602b37 100644 --- a/include/pmm/pstringview.h +++ b/include/pmm/pstringview.h @@ -239,12 +239,37 @@ template struct pstringview if ( raw == nullptr ) return psview_pptr(); - // Создаём pptr вручную из raw указателя. - std::uint8_t* base = ManagerT::backend().base_ptr(); - psview_pptr new_node( detail::ptr_to_granule_idx( base, raw ) ); + // Создаём canonical public pptr из физического raw указателя allocate(). 
+ using address_traits = typename ManagerT::address_traits; + std::uint8_t* base = ManagerT::backend().base_ptr(); + auto* raw_ptr = static_cast( raw ); + if ( base == nullptr || raw_ptr < base + sizeof( pmm::Block ) ) + { + ManagerT::deallocate( raw ); + return psview_pptr(); + } + std::size_t block_byte_off = static_cast( raw_ptr - base ) - sizeof( pmm::Block ); + if ( block_byte_off % address_traits::granule_size != 0 ) + { + ManagerT::deallocate( raw ); + return psview_pptr(); + } + std::size_t public_idx = + block_byte_off / address_traits::granule_size + detail::kBlockHeaderGranules_t; + if ( public_idx > static_cast( address_traits::no_block ) ) + { + ManagerT::deallocate( raw ); + return psview_pptr(); + } + psview_pptr new_node( static_cast( public_idx ) ); - pstringview* obj = static_cast( raw ); - obj->length = len; + pstringview* obj = ManagerT::template resolve_unchecked( new_node ); + if ( obj == nullptr ) + { + ManagerT::deallocate( raw ); + return psview_pptr(); + } + obj->length = len; // Копируем строку включая null-terminator. std::memcpy( obj->str, s, static_cast( len ) + 1 ); diff --git a/single_include/pmm/pmm.h b/single_include/pmm/pmm.h index d7294d0a..a8350399 100644 --- a/single_include/pmm/pmm.h +++ b/single_include/pmm/pmm.h @@ -6257,27 +6257,27 @@ class pptr /** * @brief Разыменование указателя (статическая модель). * - * Вызывает `ManagerT::resolve(*this)` без аргументов. + * Вызывает `ManagerT::resolve_checked(*this)` без аргументов. * Доступно только для менеджеров со статическим API (например, PersistMemoryManager). * * @return T& — ссылка на данные. */ - T& operator*() const noexcept { return *ManagerT::template resolve( *this ); } + T& operator*() const noexcept { return *ManagerT::template resolve_checked( *this ); } /** * @brief Доступ к членам через персистентный указатель (статическая модель). * - * Вызывает `ManagerT::resolve(*this)` без аргументов. + * Вызывает `ManagerT::resolve_checked(*this)` без аргументов. 
* Доступно только для менеджеров со статическим API. * * @return T* — указатель на данные. */ - T* operator->() const noexcept { return ManagerT::template resolve( *this ); } + T* operator->() const noexcept { return ManagerT::template resolve_checked( *this ); } /** * @brief Получить сырой указатель (низкоуровневый доступ). * - * Вызывает `ManagerT::resolve(*this)`. + * Вызывает `ManagerT::resolve_checked(*this)`. * Используйте `*p` или `p->field` вместо этого метода для обычных операций. * Для доступа к элементам массива используйте `ManagerT::resolve_at(p, i)`. * @@ -6286,7 +6286,16 @@ class pptr * * @return T* — указатель на данные или nullptr если is_null(). */ - T* resolve() const noexcept { return ManagerT::template resolve( *this ); } + T* resolve() const noexcept { return ManagerT::template resolve_checked( *this ); } + + /** + * @brief Получить сырой указатель через unchecked manager path. + * + * Проверяет только грубую адресуемость pptr. Не проверяет, что блок сейчас + * выделен. Предназначено для внутреннего кода менеджера и низкоуровневой + * диагностики, где stale/free-block access выбран явно. + */ + T* resolve_unchecked() const noexcept { return ManagerT::template resolve_unchecked( *this ); } // ─── Доступ к узлу AVL-дерева ──────────────────────────────── @@ -6872,12 +6881,37 @@ template struct pstringview if ( raw == nullptr ) return psview_pptr(); - // Создаём pptr вручную из raw указателя. - std::uint8_t* base = ManagerT::backend().base_ptr(); - psview_pptr new_node( detail::ptr_to_granule_idx( base, raw ) ); + // Создаём canonical public pptr из физического raw указателя allocate(). 
+ using address_traits = typename ManagerT::address_traits; + std::uint8_t* base = ManagerT::backend().base_ptr(); + auto* raw_ptr = static_cast( raw ); + if ( base == nullptr || raw_ptr < base + sizeof( pmm::Block ) ) + { + ManagerT::deallocate( raw ); + return psview_pptr(); + } + std::size_t block_byte_off = static_cast( raw_ptr - base ) - sizeof( pmm::Block ); + if ( block_byte_off % address_traits::granule_size != 0 ) + { + ManagerT::deallocate( raw ); + return psview_pptr(); + } + std::size_t public_idx = + block_byte_off / address_traits::granule_size + detail::kBlockHeaderGranules_t; + if ( public_idx > static_cast( address_traits::no_block ) ) + { + ManagerT::deallocate( raw ); + return psview_pptr(); + } + psview_pptr new_node( static_cast( public_idx ) ); - pstringview* obj = static_cast( raw ); - obj->length = len; + pstringview* obj = ManagerT::template resolve_unchecked( new_node ); + if ( obj == nullptr ) + { + ManagerT::deallocate( raw ); + return psview_pptr(); + } + obj->length = len; // Копируем строку включая null-terminator. 
std::memcpy( obj->str, s, static_cast( len ) + 1 ); @@ -7552,8 +7586,7 @@ template cla { if ( p.is_null() || !_initialized ) return; - std::uint8_t* base = _backend.base_ptr(); - void* raw = base + static_cast( p.offset() ) * address_traits::granule_size; + void* raw = raw_block_user_ptr_from_pptr( p ); deallocate( raw ); } @@ -7586,14 +7619,11 @@ template cla _last_error = PmmError::NotInitialized; return pptr(); } - std::uint8_t* base = _backend.base_ptr(); - detail::ManagerHeader* hdr = get_header( base ); - // blk_idx = pptr.offset - floor(sizeof(Block) / granule) - static constexpr index_type kBlkHdrFloorGran = - static_cast( sizeof( Block ) / address_traits::granule_size ); - index_type blk_idx = static_cast( p.offset() - kBlkHdrFloorGran ); - void* blk_raw = detail::block_at( base, blk_idx ); - index_type old_data_gran = BlockStateBase::get_weight( blk_raw ); + std::uint8_t* base = _backend.base_ptr(); + detail::ManagerHeader* hdr = get_header( base ); + index_type blk_idx = block_idx_from_pptr( p ); + void* blk_raw = detail::block_at( base, blk_idx ); + index_type old_data_gran = BlockStateBase::get_weight( blk_raw ); index_type new_data_gran = detail::bytes_to_granules_t( new_user_size ); if ( new_data_gran == 0 ) new_data_gran = 1; @@ -7623,8 +7653,6 @@ template cla } } // Fallback: allocate new + memmove + free old (under same lock). 
- static constexpr index_type kBlkHdrFloorGranFb = - static_cast( sizeof( Block ) / address_traits::granule_size ); index_type new_data_gran_alloc = detail::bytes_to_granules_t( new_user_size ); if ( new_data_gran_alloc == 0 ) new_data_gran_alloc = 1; @@ -7659,13 +7687,18 @@ template cla _last_error = PmmError::OutOfMemory; return pptr(); } - pptr new_p = make_pptr_from_raw( new_raw ); - void* new_dst = base + static_cast( new_p.offset() ) * address_traits::granule_size; - void* old_src = base + static_cast( p.offset() ) * address_traits::granule_size; + pptr new_p = make_pptr_from_raw( new_raw ); + if ( new_p.is_null() ) + { + _last_error = PmmError::InvalidPointer; + return pptr(); + } + void* new_dst = resolve_unchecked( new_p ); + void* old_src = resolve_unchecked( p ); std::size_t copy_sz = ( new_count < old_count ? new_count : old_count ) * sizeof( T ); std::memmove( new_dst, old_src, copy_sz ); // Free old block - index_type old_blk_idx = static_cast( p.offset() - kBlkHdrFloorGranFb ); + index_type old_blk_idx = block_idx_from_pptr( p ); void* old_blk_raw = detail::block_at( base, old_blk_idx ); index_type freed_w = BlockStateBase::get_weight( old_blk_raw ); if ( BlockStateBase::get_node_type( old_blk_raw ) != pmm::kNodeReadOnly ) @@ -7715,8 +7748,15 @@ template cla void* raw = allocate( sizeof( T ) ); if ( raw == nullptr ) return pptr(); - ::new ( raw ) T( static_cast( args )... ); - return make_pptr_from_raw( raw ); + pptr p = make_pptr_from_raw( raw ); + T* obj = resolve_unchecked( p ); + if ( obj == nullptr ) + { + deallocate( raw ); + return pptr(); + } + ::new ( obj ) T( static_cast( args )... 
); + return p; } /** @@ -7738,9 +7778,11 @@ template cla if ( p.is_null() || !_initialized ) return; - std::uint8_t* base = _backend.base_ptr(); - void* raw = base + static_cast( p.offset() ) * address_traits::granule_size; - reinterpret_cast( raw )->~T(); + T* obj = resolve_unchecked( p ); + void* raw = raw_block_user_ptr_from_pptr( p ); + if ( obj == nullptr || raw == nullptr ) + return; + obj->~T(); deallocate( raw ); } @@ -7753,29 +7795,71 @@ template cla } /** - * @brief Разыменовать pptr — получить сырой указатель T*. + * @brief Быстро разыменовать pptr без проверки состояния блока. * - * Этот статический метод вызывается из `pptr::resolve()`. + * Проверяет только null, инициализацию менеджера и границы буфера. Не проверяет, + * что pptr указывает на текущий выделенный блок. * * @tparam T Тип данных. * @param p Персистентный указатель. - * @return T* — указатель на данные или nullptr при ошибке. + * @return T* — указатель на данные или nullptr при грубой ошибке адреса. */ - template static T* resolve( pptr p ) noexcept + template static T* resolve_unchecked( pptr p ) noexcept { if ( p.is_null() || !_initialized ) return nullptr; - std::uint8_t* base = _backend.base_ptr(); - std::size_t byte_off = static_cast( p.offset() ) * address_traits::granule_size; - // Safety: reject out-of-bounds offsets instead of UB. - if ( byte_off + sizeof( T ) > _backend.total_size() ) + void* raw = raw_user_ptr_from_pptr( p ); + if ( raw == nullptr ) { _last_error = PmmError::InvalidPointer; return nullptr; } - return reinterpret_cast( base + byte_off ); + _last_error = PmmError::Ok; + return reinterpret_cast( raw ); } + /** + * @brief Разыменовать pptr с публичной проверкой live allocated block. + * + * Этот путь проверяет не только границы буфера, но и заголовок блока: + * pptr должен указывать на текущий занятый блок. Stale pptr после + * deallocate_typed() возвращает nullptr. + * + * @tparam T Тип данных. + * @param p Персистентный указатель. 
+ * @return T* — указатель на данные или nullptr при ошибке. + */ + template static T* resolve_checked( pptr p ) noexcept + { + T* raw = resolve_unchecked( p ); + const void* user_raw = raw; + if ( user_raw == nullptr ) + return nullptr; + const void* blk_raw = find_block_from_user_ptr( user_raw ); + if ( blk_raw == nullptr ) + { + _last_error = PmmError::InvalidPointer; + return nullptr; + } + if ( BlockStateBase::get_weight( blk_raw ) == 0 || + BlockStateBase::get_root_offset( blk_raw ) == 0 ) + { + _last_error = PmmError::InvalidPointer; + return nullptr; + } + _last_error = PmmError::Ok; + return raw; + } + + /** + * @brief Совместимый публичный checked access. + * + * Старое имя сохранено как alias, но его семантика теперь совпадает с + * resolve_checked(). Внутренний код, которому нужен только offset->address, + * должен явно использовать resolve_unchecked(). + */ + template static T* resolve( pptr p ) noexcept { return resolve_checked( p ); } + /** * @brief Разыменовать pptr и получить указатель на i-й элемент массива. * @@ -7786,7 +7870,7 @@ template cla */ template static T* resolve_at( pptr p, std::size_t i ) noexcept { - T* base_elem = resolve( p ); + T* base_elem = resolve_checked( p ); return ( base_elem == nullptr ) ? nullptr : base_elem + i; } @@ -7822,13 +7906,7 @@ template cla * @param p Персистентный указатель. * @return true если pptr валиден (в пределах кучи), false если null или вне границ. 
*/ - template static bool is_valid_ptr( pptr p ) noexcept - { - if ( p.is_null() || !_initialized ) - return false; - std::size_t byte_off = static_cast( p.offset() ) * address_traits::granule_size; - return byte_off + sizeof( T ) <= _backend.total_size(); - } + template static bool is_valid_ptr( pptr p ) noexcept { return resolve_checked( p ) != nullptr; } // ─── Root object API ────────────────────────────── @@ -8389,8 +8467,15 @@ template cla void* raw = allocate_unlocked( sizeof( T ) ); if ( raw == nullptr ) return pptr(); - ::new ( raw ) T( static_cast( args )... ); - return make_pptr_from_raw( raw ); + pptr p = make_pptr_from_raw( raw ); + T* obj = resolve_unchecked( p ); + if ( obj == nullptr ) + { + deallocate_unlocked( raw ); + return pptr(); + } + ::new ( obj ) T( static_cast( args )... ); + return p; } // Forest/domain registry private methods — extracted to forest_domain_mixin.inc @@ -8464,8 +8549,8 @@ static forest_domain* find_domain_by_symbol_unlocked( pptr symbol ) { if ( symbol.is_null() ) return nullptr; - pstringview* sym = resolve( symbol ); - if ( sym == nullptr ) + const char* sym_str = pstringview_c_str_unlocked( symbol ); + if ( sym_str == nullptr ) return nullptr; forest_registry* reg = forest_registry_root_unlocked(); if ( reg == nullptr ) @@ -8473,7 +8558,7 @@ static forest_domain* find_domain_by_symbol_unlocked( pptr symbol ) for ( std::uint16_t i = 0; i < reg->domain_count; ++i ) { if ( reg->domains[i].symbol_offset == symbol.offset() || - std::strncmp( reg->domains[i].name, sym->c_str(), detail::kForestDomainNameCapacity ) == 0 ) + std::strncmp( reg->domains[i].name, sym_str, detail::kForestDomainNameCapacity ) == 0 ) { reg->domains[i].symbol_offset = symbol.offset(); return ®->domains[i]; @@ -8591,10 +8676,10 @@ static pptr intern_symbol_unlocked( const char* s ) noexcept symbol_domain->root_offset, [&]( pptr cur ) -> int { - pstringview* obj = resolve( cur ); - return ( obj != nullptr ) ? 
std::strcmp( s, obj->c_str() ) : 0; + const char* cur_str = pstringview_c_str_unlocked( cur ); + return ( cur_str != nullptr ) ? std::strcmp( s, cur_str ) : 0; }, - []( pptr p ) -> pstringview* { return resolve( p ); } ); + []( pptr p ) -> const char* { return pstringview_c_str_unlocked( p ); } ); if ( !found.is_null() ) return found; @@ -8604,27 +8689,32 @@ static pptr intern_symbol_unlocked( const char* s ) noexcept if ( raw == nullptr ) return pptr(); - std::uint8_t* base = _backend.base_ptr(); - pptr new_node( detail::ptr_to_granule_idx( base, raw ) ); + pptr new_node = make_pptr_from_raw( raw ); + void* public_raw = raw_user_ptr_from_pptr( new_node ); + if ( public_raw == nullptr ) + { + deallocate_unlocked( raw ); + return pptr(); + } // Use memcpy to avoid UB on potentially misaligned raw pointer (ASan/UBSan fix). - std::memcpy( raw, &len, sizeof( len ) ); - char* str_dst = static_cast( raw ) + offsetof( pstringview, str ); + std::memcpy( public_raw, &len, sizeof( len ) ); + char* str_dst = static_cast( public_raw ) + offsetof( pstringview, str ); std::memcpy( str_dst, s, static_cast( len ) + 1 ); detail::avl_init_node( new_node ); - if ( !lock_block_permanent_unlocked( raw ) ) + if ( !lock_block_permanent_unlocked( public_raw ) ) return pptr(); // Re-derive c_str() pointer for comparisons using offset-based access. 
- const char* new_str = static_cast( raw ) + offsetof( pstringview, str ); + const char* new_str = static_cast( public_raw ) + offsetof( pstringview, str ); detail::avl_insert( new_node, symbol_domain->root_offset, [&]( pptr cur ) -> bool { - pstringview* cur_obj = resolve( cur ); - return ( cur_obj != nullptr ) && ( std::strcmp( new_str, cur_obj->c_str() ) < 0 ); + const char* cur_str = pstringview_c_str_unlocked( cur ); + return ( cur_str != nullptr ) && ( std::strcmp( new_str, cur_str ) < 0 ); }, - []( pptr p ) -> pstringview* { return resolve( p ); } ); + []( pptr p ) -> const char* { return pstringview_c_str_unlocked( p ); } ); return new_node; } @@ -8894,6 +8984,159 @@ static void for_each_free_block_inorder( const std::uint8_t* base, const detail: for_each_free_block_inorder( base, hdr, right_off, depth + 1, callback ); } +/// @brief Find the mutable block header for a user-data pointer (or nullptr). +static pmm::Block* find_block_from_user_ptr( void* ptr ) noexcept +{ + std::uint8_t* base = _backend.base_ptr(); + detail::ManagerHeader* hdr = get_header( base ); + if constexpr ( sizeof( Block ) % address_traits::granule_size != 0 ) + { + constexpr std::size_t rounded_header_size = + static_cast( kBlockHdrGranules ) * address_traits::granule_size; + if ( ptr != nullptr && base != nullptr ) + { + auto* raw = static_cast( ptr ); + if ( raw >= base + rounded_header_size && raw < base + static_cast( hdr->total_size ) ) + { + std::uint8_t* cand = raw - rounded_header_size; + if ( ( static_cast( cand - base ) % address_traits::granule_size ) == 0 && + cand + sizeof( Block ) <= base + static_cast( hdr->total_size ) && + BlockStateBase::get_weight( cand ) != 0 ) + return reinterpret_cast*>( cand ); + } + } + } + return detail::header_from_ptr_t( base, ptr, static_cast( hdr->total_size ) ); +} + +/// @brief Find the const block header for a user-data pointer. +/// Returns nullptr if ptr is out of range or the block header is invalid. 
+static const pmm::Block* find_block_from_user_ptr( const void* ptr ) noexcept +{ + const std::uint8_t* base = _backend.base_ptr(); + const auto* hdr = get_header_c( base ); + if constexpr ( sizeof( Block ) % address_traits::granule_size != 0 ) + { + constexpr std::size_t rounded_header_size = + static_cast( kBlockHdrGranules ) * address_traits::granule_size; + if ( ptr != nullptr && base != nullptr ) + { + const auto* raw = static_cast( ptr ); + if ( raw >= base + rounded_header_size && raw < base + static_cast( hdr->total_size ) ) + { + const std::uint8_t* cand = raw - rounded_header_size; + if ( ( static_cast( cand - base ) % address_traits::granule_size ) == 0 && + cand + sizeof( Block ) <= base + static_cast( hdr->total_size ) && + BlockStateBase::get_weight( cand ) != 0 ) + return reinterpret_cast*>( cand ); + } + } + } + return detail::header_from_ptr_t( const_cast( base ), const_cast( ptr ), + static_cast( hdr->total_size ) ); +} + +// ─── raw ↔ pptr helpers ─────────────────────────────────────── + +/// @brief Convert a raw user-data pointer returned by allocate() into a canonical public pptr. +/// Caller must ensure raw != nullptr and _initialized before calling. +/// Returns null pptr if the pointer is not within the managed region. 
+template static pptr make_pptr_from_raw( void* raw ) noexcept +{ + if ( raw == nullptr || !_initialized ) + return pptr(); + std::uint8_t* base = _backend.base_ptr(); + auto* raw_byte = static_cast( raw ); + if ( base == nullptr || raw_byte < base || raw_byte >= base + _backend.total_size() ) + return pptr(); + pmm::Block* blk = find_block_from_user_ptr( raw ); + if ( blk == nullptr ) + return pptr(); + index_type blk_idx = detail::block_idx_t( base, blk ); + if ( blk_idx > std::numeric_limits::max() - kBlockHdrGranules ) + return pptr(); + return pptr( static_cast( blk_idx + kBlockHdrGranules ) ); +} + +// ─── blk_raw helpers ────────────────────────────────────────── +// base + (offset - kBlockHdrGranules) * granule_size → block header before public user data. + +/// @brief Return a const pointer to the block header for the given pptr. +/// Returns nullptr if offset is invalid (would place block header before base). +template static const void* block_raw_ptr_from_pptr( pptr p ) noexcept +{ + const std::uint8_t* base = _backend.base_ptr(); + if ( p.offset() < kBlockHdrGranules ) + return nullptr; + std::size_t blk_off = static_cast( p.offset() - kBlockHdrGranules ) * address_traits::granule_size; + if ( blk_off + sizeof( Block ) > _backend.total_size() ) + return nullptr; + return base + blk_off; +} + +/// @brief Return a mutable pointer to the block header for the given pptr. +/// Returns nullptr if offset is invalid (would place block header before base). 
+template static void* block_raw_mut_ptr_from_pptr( pptr p ) noexcept +{ + std::uint8_t* base = _backend.base_ptr(); + if ( p.offset() < kBlockHdrGranules ) + return nullptr; + std::size_t blk_off = static_cast( p.offset() - kBlockHdrGranules ) * address_traits::granule_size; + if ( blk_off + sizeof( Block ) > _backend.total_size() ) + return nullptr; + return base + blk_off; +} + +template static constexpr index_type block_idx_from_pptr( pptr p ) noexcept +{ + constexpr index_type kHdrGranules = static_cast( + ( sizeof( Block ) + address_traits::granule_size - 1 ) / address_traits::granule_size ); + return static_cast( p.offset() - kHdrGranules ); +} + +template static void* raw_user_ptr_from_pptr( pptr p ) noexcept +{ + if ( p.is_null() || !_initialized ) + return nullptr; + + std::uint8_t* base = _backend.base_ptr(); + std::size_t byte_off = static_cast( p.offset() ) * address_traits::granule_size; + if ( byte_off + sizeof( T ) > _backend.total_size() ) + return nullptr; + return base + byte_off; +} + +template static void* raw_block_user_ptr_from_pptr( pptr p ) noexcept +{ + if ( p.is_null() || !_initialized ) + return nullptr; + + std::uint8_t* base = _backend.base_ptr(); + if constexpr ( sizeof( Block ) % address_traits::granule_size == 0 ) + { + return raw_user_ptr_from_pptr( p ); + } + else + { + constexpr index_type kHdrGranules = static_cast( + ( sizeof( Block ) + address_traits::granule_size - 1 ) / address_traits::granule_size ); + if ( p.offset() < kHdrGranules ) + return nullptr; + std::size_t blk_off = static_cast( p.offset() - kHdrGranules ) * address_traits::granule_size; + if ( blk_off + sizeof( Block ) > _backend.total_size() ) + return nullptr; + return base + blk_off + sizeof( Block ); + } +} + +static const char* pstringview_c_str_unlocked( pptr p ) noexcept +{ + const void* raw = raw_user_ptr_from_pptr( p ); + if ( raw == nullptr ) + return nullptr; + return static_cast( raw ) + offsetof( pstringview, str ); +} + // Verify/repair methods — 
extracted to verify_repair_mixin.inc. // ─── Verify / Repair mixin ────────────────────────────────────── // Included inside PersistMemoryManager private section. @@ -9007,73 +9250,6 @@ static void verify_forest_registry_unlocked( VerifyResult& result ) noexcept } } - /// @brief Find the mutable block header for a user-data pointer (or nullptr). - static pmm::Block* find_block_from_user_ptr( void* ptr ) noexcept - { - std::uint8_t* base = _backend.base_ptr(); - detail::ManagerHeader* hdr = get_header( base ); - return detail::header_from_ptr_t( base, ptr, static_cast( hdr->total_size ) ); - } - - /// @brief Find the const block header for a user-data pointer. - /// Returns nullptr if ptr is out of range or the block header is invalid. - static const pmm::Block* find_block_from_user_ptr( const void* ptr ) noexcept - { - const std::uint8_t* base = _backend.base_ptr(); - return detail::header_from_ptr_t( - const_cast( base ), const_cast( ptr ), - static_cast( get_header_c( base )->total_size ) ); - } - - // ─── raw ↔ pptr helpers ─────────────────────────────────────── - - /// @brief Convert a raw user-data pointer returned by allocate() into a pptr. - /// Caller must ensure raw != nullptr and _initialized before calling. - /// Returns null pptr if the pointer is not within the managed region. - template static pptr make_pptr_from_raw( void* raw ) noexcept - { - std::uint8_t* base = _backend.base_ptr(); - auto* raw_byte = static_cast( raw ); - if ( raw_byte < base || raw_byte >= base + _backend.total_size() ) - return pptr(); - std::size_t byte_off = static_cast( raw_byte - base ); - std::size_t idx = byte_off / address_traits::granule_size; - if ( idx > static_cast( std::numeric_limits::max() ) ) - return pptr(); - return pptr( static_cast( idx ) ); - } - - // ─── blk_raw helpers ────────────────────────────────────────── - // base + offset * granule_size - sizeof(Block) → block header before user data. 
- - /// @brief Return a const pointer to the block header for the given pptr. - /// Returns nullptr if offset is invalid (would place block header before base). - template static const void* block_raw_ptr_from_pptr( pptr p ) noexcept - { - const std::uint8_t* base = _backend.base_ptr(); - std::size_t byte_off = static_cast( p.offset() ) * address_traits::granule_size; - if ( byte_off < sizeof( Block ) ) - return nullptr; - std::size_t blk_off = byte_off - sizeof( Block ); - if ( blk_off + sizeof( Block ) > _backend.total_size() ) - return nullptr; - return base + blk_off; - } - - /// @brief Return a mutable pointer to the block header for the given pptr. - /// Returns nullptr if offset is invalid (would place block header before base). - template static void* block_raw_mut_ptr_from_pptr( pptr p ) noexcept - { - std::uint8_t* base = _backend.base_ptr(); - std::size_t byte_off = static_cast( p.offset() ) * address_traits::granule_size; - if ( byte_off < sizeof( Block ) ) - return nullptr; - std::size_t blk_off = byte_off - sizeof( Block ); - if ( blk_off + sizeof( Block ) > _backend.total_size() ) - return nullptr; - return base + blk_off; - } - // ─── Address-traits-specific layout constants ────────────────── // These compute the correct granule indices based on the actual address_traits // granule size, rather than using the hardcoded DefaultAddressTraits constants. 
diff --git a/single_include/pmm/pmm_no_comments.h b/single_include/pmm/pmm_no_comments.h index 59047197..cef12dcd 100644 --- a/single_include/pmm/pmm_no_comments.h +++ b/single_include/pmm/pmm_no_comments.h @@ -3682,11 +3682,13 @@ class pptr return **this < *other; } - T& operator*() const noexcept { return *ManagerT::template resolve( *this ); } + T& operator*() const noexcept { return *ManagerT::template resolve_checked( *this ); } - T* operator->() const noexcept { return ManagerT::template resolve( *this ); } + T* operator->() const noexcept { return ManagerT::template resolve_checked( *this ); } - T* resolve() const noexcept { return ManagerT::template resolve( *this ); } + T* resolve() const noexcept { return ManagerT::template resolve_checked( *this ); } + + T* resolve_unchecked() const noexcept { return ManagerT::template resolve_unchecked( *this ); } auto& tree_node() const noexcept { return ManagerT::tree_node( *this ); } }; @@ -3960,11 +3962,36 @@ template struct pstringview if ( raw == nullptr ) return psview_pptr(); - std::uint8_t* base = ManagerT::backend().base_ptr(); - psview_pptr new_node( detail::ptr_to_granule_idx( base, raw ) ); + using address_traits = typename ManagerT::address_traits; + std::uint8_t* base = ManagerT::backend().base_ptr(); + auto* raw_ptr = static_cast( raw ); + if ( base == nullptr || raw_ptr < base + sizeof( pmm::Block ) ) + { + ManagerT::deallocate( raw ); + return psview_pptr(); + } + std::size_t block_byte_off = static_cast( raw_ptr - base ) - sizeof( pmm::Block ); + if ( block_byte_off % address_traits::granule_size != 0 ) + { + ManagerT::deallocate( raw ); + return psview_pptr(); + } + std::size_t public_idx = + block_byte_off / address_traits::granule_size + detail::kBlockHeaderGranules_t; + if ( public_idx > static_cast( address_traits::no_block ) ) + { + ManagerT::deallocate( raw ); + return psview_pptr(); + } + psview_pptr new_node( static_cast( public_idx ) ); - pstringview* obj = static_cast( raw ); - obj->length 
= len; + pstringview* obj = ManagerT::template resolve_unchecked( new_node ); + if ( obj == nullptr ) + { + ManagerT::deallocate( raw ); + return psview_pptr(); + } + obj->length = len; std::memcpy( obj->str, s, static_cast( len ) + 1 ); @@ -4388,8 +4415,7 @@ template cla { if ( p.is_null() || !_initialized ) return; - std::uint8_t* base = _backend.base_ptr(); - void* raw = base + static_cast( p.offset() ) * address_traits::granule_size; + void* raw = raw_block_user_ptr_from_pptr( p ); deallocate( raw ); } @@ -4417,14 +4443,11 @@ template cla _last_error = PmmError::NotInitialized; return pptr(); } - std::uint8_t* base = _backend.base_ptr(); - detail::ManagerHeader* hdr = get_header( base ); - - static constexpr index_type kBlkHdrFloorGran = - static_cast( sizeof( Block ) / address_traits::granule_size ); - index_type blk_idx = static_cast( p.offset() - kBlkHdrFloorGran ); - void* blk_raw = detail::block_at( base, blk_idx ); - index_type old_data_gran = BlockStateBase::get_weight( blk_raw ); + std::uint8_t* base = _backend.base_ptr(); + detail::ManagerHeader* hdr = get_header( base ); + index_type blk_idx = block_idx_from_pptr( p ); + void* blk_raw = detail::block_at( base, blk_idx ); + index_type old_data_gran = BlockStateBase::get_weight( blk_raw ); index_type new_data_gran = detail::bytes_to_granules_t( new_user_size ); if ( new_data_gran == 0 ) new_data_gran = 1; @@ -4454,8 +4477,6 @@ template cla } } - static constexpr index_type kBlkHdrFloorGranFb = - static_cast( sizeof( Block ) / address_traits::granule_size ); index_type new_data_gran_alloc = detail::bytes_to_granules_t( new_user_size ); if ( new_data_gran_alloc == 0 ) new_data_gran_alloc = 1; @@ -4490,13 +4511,18 @@ template cla _last_error = PmmError::OutOfMemory; return pptr(); } - pptr new_p = make_pptr_from_raw( new_raw ); - void* new_dst = base + static_cast( new_p.offset() ) * address_traits::granule_size; - void* old_src = base + static_cast( p.offset() ) * address_traits::granule_size; + pptr 
new_p = make_pptr_from_raw( new_raw ); + if ( new_p.is_null() ) + { + _last_error = PmmError::InvalidPointer; + return pptr(); + } + void* new_dst = resolve_unchecked( new_p ); + void* old_src = resolve_unchecked( p ); std::size_t copy_sz = ( new_count < old_count ? new_count : old_count ) * sizeof( T ); std::memmove( new_dst, old_src, copy_sz ); - index_type old_blk_idx = static_cast( p.offset() - kBlkHdrFloorGranFb ); + index_type old_blk_idx = block_idx_from_pptr( p ); void* old_blk_raw = detail::block_at( base, old_blk_idx ); index_type freed_w = BlockStateBase::get_weight( old_blk_raw ); if ( BlockStateBase::get_node_type( old_blk_raw ) != pmm::kNodeReadOnly ) @@ -4526,8 +4552,15 @@ template cla void* raw = allocate( sizeof( T ) ); if ( raw == nullptr ) return pptr(); - ::new ( raw ) T( static_cast( args )... ); - return make_pptr_from_raw( raw ); + pptr p = make_pptr_from_raw( raw ); + T* obj = resolve_unchecked( p ); + if ( obj == nullptr ) + { + deallocate( raw ); + return pptr(); + } + ::new ( obj ) T( static_cast( args )... ); + return p; } template static void destroy_typed( pptr p ) noexcept @@ -4537,9 +4570,11 @@ template cla if ( p.is_null() || !_initialized ) return; - std::uint8_t* base = _backend.base_ptr(); - void* raw = base + static_cast( p.offset() ) * address_traits::granule_size; - reinterpret_cast( raw )->~T(); + T* obj = resolve_unchecked( p ); + void* raw = raw_block_user_ptr_from_pptr( p ); + if ( obj == nullptr || raw == nullptr ) + return; + obj->~T(); deallocate( raw ); } @@ -4548,24 +4583,47 @@ template cla return typed_guard( create_typed( static_cast( args )... 
) ); } - template static T* resolve( pptr p ) noexcept + template static T* resolve_unchecked( pptr p ) noexcept { if ( p.is_null() || !_initialized ) return nullptr; - std::uint8_t* base = _backend.base_ptr(); - std::size_t byte_off = static_cast( p.offset() ) * address_traits::granule_size; - - if ( byte_off + sizeof( T ) > _backend.total_size() ) + void* raw = raw_user_ptr_from_pptr( p ); + if ( raw == nullptr ) { _last_error = PmmError::InvalidPointer; return nullptr; } - return reinterpret_cast( base + byte_off ); + _last_error = PmmError::Ok; + return reinterpret_cast( raw ); } + template static T* resolve_checked( pptr p ) noexcept + { + T* raw = resolve_unchecked( p ); + const void* user_raw = raw; + if ( user_raw == nullptr ) + return nullptr; + const void* blk_raw = find_block_from_user_ptr( user_raw ); + if ( blk_raw == nullptr ) + { + _last_error = PmmError::InvalidPointer; + return nullptr; + } + if ( BlockStateBase::get_weight( blk_raw ) == 0 || + BlockStateBase::get_root_offset( blk_raw ) == 0 ) + { + _last_error = PmmError::InvalidPointer; + return nullptr; + } + _last_error = PmmError::Ok; + return raw; + } + + template static T* resolve( pptr p ) noexcept { return resolve_checked( p ); } + template static T* resolve_at( pptr p, std::size_t i ) noexcept { - T* base_elem = resolve( p ); + T* base_elem = resolve_checked( p ); return ( base_elem == nullptr ) ? 
nullptr : base_elem + i; } @@ -4587,13 +4645,7 @@ template cla return pptr( static_cast( idx ) ); } - template static bool is_valid_ptr( pptr p ) noexcept - { - if ( p.is_null() || !_initialized ) - return false; - std::size_t byte_off = static_cast( p.offset() ) * address_traits::granule_size; - return byte_off + sizeof( T ) <= _backend.total_size(); - } + template static bool is_valid_ptr( pptr p ) noexcept { return resolve_checked( p ) != nullptr; } template static void set_root( pptr p ) noexcept { @@ -5070,8 +5122,15 @@ template cla void* raw = allocate_unlocked( sizeof( T ) ); if ( raw == nullptr ) return pptr(); - ::new ( raw ) T( static_cast( args )... ); - return make_pptr_from_raw( raw ); + pptr p = make_pptr_from_raw( raw ); + T* obj = resolve_unchecked( p ); + if ( obj == nullptr ) + { + deallocate_unlocked( raw ); + return pptr(); + } + ::new ( obj ) T( static_cast( args )... ); + return p; } static forest_registry* forest_registry_root_unlocked() noexcept @@ -5124,8 +5183,8 @@ static forest_domain* find_domain_by_symbol_unlocked( pptr symbol ) { if ( symbol.is_null() ) return nullptr; - pstringview* sym = resolve( symbol ); - if ( sym == nullptr ) + const char* sym_str = pstringview_c_str_unlocked( symbol ); + if ( sym_str == nullptr ) return nullptr; forest_registry* reg = forest_registry_root_unlocked(); if ( reg == nullptr ) @@ -5133,7 +5192,7 @@ static forest_domain* find_domain_by_symbol_unlocked( pptr symbol ) for ( std::uint16_t i = 0; i < reg->domain_count; ++i ) { if ( reg->domains[i].symbol_offset == symbol.offset() || - std::strncmp( reg->domains[i].name, sym->c_str(), detail::kForestDomainNameCapacity ) == 0 ) + std::strncmp( reg->domains[i].name, sym_str, detail::kForestDomainNameCapacity ) == 0 ) { reg->domains[i].symbol_offset = symbol.offset(); return ®->domains[i]; @@ -5243,10 +5302,10 @@ static pptr intern_symbol_unlocked( const char* s ) noexcept symbol_domain->root_offset, [&]( pptr cur ) -> int { - pstringview* obj = resolve( cur 
); - return ( obj != nullptr ) ? std::strcmp( s, obj->c_str() ) : 0; + const char* cur_str = pstringview_c_str_unlocked( cur ); + return ( cur_str != nullptr ) ? std::strcmp( s, cur_str ) : 0; }, - []( pptr p ) -> pstringview* { return resolve( p ); } ); + []( pptr p ) -> const char* { return pstringview_c_str_unlocked( p ); } ); if ( !found.is_null() ) return found; @@ -5256,26 +5315,31 @@ static pptr intern_symbol_unlocked( const char* s ) noexcept if ( raw == nullptr ) return pptr(); - std::uint8_t* base = _backend.base_ptr(); - pptr new_node( detail::ptr_to_granule_idx( base, raw ) ); + pptr new_node = make_pptr_from_raw( raw ); + void* public_raw = raw_user_ptr_from_pptr( new_node ); + if ( public_raw == nullptr ) + { + deallocate_unlocked( raw ); + return pptr(); + } - std::memcpy( raw, &len, sizeof( len ) ); - char* str_dst = static_cast( raw ) + offsetof( pstringview, str ); + std::memcpy( public_raw, &len, sizeof( len ) ); + char* str_dst = static_cast( public_raw ) + offsetof( pstringview, str ); std::memcpy( str_dst, s, static_cast( len ) + 1 ); detail::avl_init_node( new_node ); - if ( !lock_block_permanent_unlocked( raw ) ) + if ( !lock_block_permanent_unlocked( public_raw ) ) return pptr(); - const char* new_str = static_cast( raw ) + offsetof( pstringview, str ); + const char* new_str = static_cast( public_raw ) + offsetof( pstringview, str ); detail::avl_insert( new_node, symbol_domain->root_offset, [&]( pptr cur ) -> bool { - pstringview* cur_obj = resolve( cur ); - return ( cur_obj != nullptr ) && ( std::strcmp( new_str, cur_obj->c_str() ) < 0 ); + const char* cur_str = pstringview_c_str_unlocked( cur ); + return ( cur_str != nullptr ) && ( std::strcmp( new_str, cur_str ) < 0 ); }, - []( pptr p ) -> pstringview* { return resolve( p ); } ); + []( pptr p ) -> const char* { return pstringview_c_str_unlocked( p ); } ); return new_node; } @@ -5530,6 +5594,144 @@ static void for_each_free_block_inorder( const std::uint8_t* base, const detail: 
for_each_free_block_inorder( base, hdr, right_off, depth + 1, callback ); } +static pmm::Block* find_block_from_user_ptr( void* ptr ) noexcept +{ + std::uint8_t* base = _backend.base_ptr(); + detail::ManagerHeader* hdr = get_header( base ); + if constexpr ( sizeof( Block ) % address_traits::granule_size != 0 ) + { + constexpr std::size_t rounded_header_size = + static_cast( kBlockHdrGranules ) * address_traits::granule_size; + if ( ptr != nullptr && base != nullptr ) + { + auto* raw = static_cast( ptr ); + if ( raw >= base + rounded_header_size && raw < base + static_cast( hdr->total_size ) ) + { + std::uint8_t* cand = raw - rounded_header_size; + if ( ( static_cast( cand - base ) % address_traits::granule_size ) == 0 && + cand + sizeof( Block ) <= base + static_cast( hdr->total_size ) && + BlockStateBase::get_weight( cand ) != 0 ) + return reinterpret_cast*>( cand ); + } + } + } + return detail::header_from_ptr_t( base, ptr, static_cast( hdr->total_size ) ); +} + +static const pmm::Block* find_block_from_user_ptr( const void* ptr ) noexcept +{ + const std::uint8_t* base = _backend.base_ptr(); + const auto* hdr = get_header_c( base ); + if constexpr ( sizeof( Block ) % address_traits::granule_size != 0 ) + { + constexpr std::size_t rounded_header_size = + static_cast( kBlockHdrGranules ) * address_traits::granule_size; + if ( ptr != nullptr && base != nullptr ) + { + const auto* raw = static_cast( ptr ); + if ( raw >= base + rounded_header_size && raw < base + static_cast( hdr->total_size ) ) + { + const std::uint8_t* cand = raw - rounded_header_size; + if ( ( static_cast( cand - base ) % address_traits::granule_size ) == 0 && + cand + sizeof( Block ) <= base + static_cast( hdr->total_size ) && + BlockStateBase::get_weight( cand ) != 0 ) + return reinterpret_cast*>( cand ); + } + } + } + return detail::header_from_ptr_t( const_cast( base ), const_cast( ptr ), + static_cast( hdr->total_size ) ); +} + +template static pptr make_pptr_from_raw( void* raw ) noexcept +{ 
+ if ( raw == nullptr || !_initialized ) + return pptr(); + std::uint8_t* base = _backend.base_ptr(); + auto* raw_byte = static_cast( raw ); + if ( base == nullptr || raw_byte < base || raw_byte >= base + _backend.total_size() ) + return pptr(); + pmm::Block* blk = find_block_from_user_ptr( raw ); + if ( blk == nullptr ) + return pptr(); + index_type blk_idx = detail::block_idx_t( base, blk ); + if ( blk_idx > std::numeric_limits::max() - kBlockHdrGranules ) + return pptr(); + return pptr( static_cast( blk_idx + kBlockHdrGranules ) ); +} + +template static const void* block_raw_ptr_from_pptr( pptr p ) noexcept +{ + const std::uint8_t* base = _backend.base_ptr(); + if ( p.offset() < kBlockHdrGranules ) + return nullptr; + std::size_t blk_off = static_cast( p.offset() - kBlockHdrGranules ) * address_traits::granule_size; + if ( blk_off + sizeof( Block ) > _backend.total_size() ) + return nullptr; + return base + blk_off; +} + +template static void* block_raw_mut_ptr_from_pptr( pptr p ) noexcept +{ + std::uint8_t* base = _backend.base_ptr(); + if ( p.offset() < kBlockHdrGranules ) + return nullptr; + std::size_t blk_off = static_cast( p.offset() - kBlockHdrGranules ) * address_traits::granule_size; + if ( blk_off + sizeof( Block ) > _backend.total_size() ) + return nullptr; + return base + blk_off; +} + +template static constexpr index_type block_idx_from_pptr( pptr p ) noexcept +{ + constexpr index_type kHdrGranules = static_cast( + ( sizeof( Block ) + address_traits::granule_size - 1 ) / address_traits::granule_size ); + return static_cast( p.offset() - kHdrGranules ); +} + +template static void* raw_user_ptr_from_pptr( pptr p ) noexcept +{ + if ( p.is_null() || !_initialized ) + return nullptr; + + std::uint8_t* base = _backend.base_ptr(); + std::size_t byte_off = static_cast( p.offset() ) * address_traits::granule_size; + if ( byte_off + sizeof( T ) > _backend.total_size() ) + return nullptr; + return base + byte_off; +} + +template static void* 
raw_block_user_ptr_from_pptr( pptr p ) noexcept
+{
+    if ( p.is_null() || !_initialized )
+        return nullptr;
+
+    std::uint8_t* base = _backend.base_ptr();
+    if constexpr ( sizeof( Block ) % address_traits::granule_size == 0 )
+    {
+        return raw_user_ptr_from_pptr( p );
+    }
+    else
+    {
+        constexpr index_type kHdrGranules = static_cast<index_type>(
+            ( sizeof( Block ) + address_traits::granule_size - 1 ) / address_traits::granule_size );
+        if ( p.offset() < kHdrGranules )
+            return nullptr;
+        std::size_t blk_off = static_cast<std::size_t>( p.offset() - kHdrGranules ) * address_traits::granule_size;
+        if ( blk_off + sizeof( Block ) > _backend.total_size() )
+            return nullptr;
+        return base + blk_off + sizeof( Block );
+    }
+}
+
+static const char* pstringview_c_str_unlocked( pptr p ) noexcept
+{
+    const void* raw = raw_user_ptr_from_pptr( p );
+    if ( raw == nullptr )
+        return nullptr;
+    return static_cast<const char*>( raw ) + offsetof( pstringview, str );
+}
+
 static void verify_image_unlocked( VerifyResult& result ) noexcept
 {
     result.mode = RecoveryMode::Verify;
@@ -5614,58 +5816,6 @@ static void verify_forest_registry_unlocked( VerifyResult& result ) noexcept
     }
 }
 
-static pmm::Block* find_block_from_user_ptr( void* ptr ) noexcept
-{
-    std::uint8_t* base = _backend.base_ptr();
-    detail::ManagerHeader* hdr = get_header( base );
-    return detail::header_from_ptr_t( base, ptr, static_cast<std::size_t>( hdr->total_size ) );
-}
-
-static const pmm::Block* find_block_from_user_ptr( const void* ptr ) noexcept
-{
-    const std::uint8_t* base = _backend.base_ptr();
-    return detail::header_from_ptr_t(
-        const_cast<std::uint8_t*>( base ), const_cast<void*>( ptr ),
-        static_cast<std::size_t>( get_header_c( base )->total_size ) );
-}
-
-template <typename = void> static pptr make_pptr_from_raw( void* raw ) noexcept
-{
-    std::uint8_t* base = _backend.base_ptr();
-    auto* raw_byte = static_cast<std::uint8_t*>( raw );
-    if ( raw_byte < base || raw_byte >= base + _backend.total_size() )
-        return pptr();
-    std::size_t byte_off = static_cast<std::size_t>( raw_byte - base );
-    std::size_t idx = byte_off / address_traits::granule_size;
-    if ( idx > static_cast<std::size_t>( std::numeric_limits<index_type>::max() ) )
-        return pptr();
-    return pptr( static_cast<index_type>( idx ) );
-}
-
-template <typename = void> static const void* block_raw_ptr_from_pptr( pptr p ) noexcept
-{
-    const std::uint8_t* base = _backend.base_ptr();
-    std::size_t byte_off = static_cast<std::size_t>( p.offset() ) * address_traits::granule_size;
-    if ( byte_off < sizeof( Block ) )
-        return nullptr;
-    std::size_t blk_off = byte_off - sizeof( Block );
-    if ( blk_off + sizeof( Block ) > _backend.total_size() )
-        return nullptr;
-    return base + blk_off;
-}
-
-template <typename = void> static void* block_raw_mut_ptr_from_pptr( pptr p ) noexcept
-{
-    std::uint8_t* base = _backend.base_ptr();
-    std::size_t byte_off = static_cast<std::size_t>( p.offset() ) * address_traits::granule_size;
-    if ( byte_off < sizeof( Block ) )
-        return nullptr;
-    std::size_t blk_off = byte_off - sizeof( Block );
-    if ( blk_off + sizeof( Block ) > _backend.total_size() )
-        return nullptr;
-    return base + blk_off;
-}
-
 static constexpr std::size_t kBlockHdrByteSize =
     ( ( sizeof( Block ) + address_traits::granule_size - 1 ) / address_traits::granule_size ) * address_traits::granule_size;
diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt
index 0003e808..caf3f4c2 100644
--- a/tests/CMakeLists.txt
+++ b/tests/CMakeLists.txt
@@ -296,6 +296,9 @@ target_compile_definitions(test_issue295_layout_compaction PRIVATE PMM_SOURCE_DIR="${CMAKE_SOURCE_DIR}")
 pmm_add_test(test_issue306_repo_guard_gitkeep test_issue306_repo_guard_gitkeep.cpp)
 target_compile_definitions(test_issue306_repo_guard_gitkeep PRIVATE PMM_SOURCE_DIR="${CMAKE_SOURCE_DIR}")
 
+# ─── Issue 312: explicit pptr checked/unchecked access modes ────
+pmm_add_test(test_issue312_access_modes test_issue312_access_modes.cpp)
+
 # ─── Issue 314: build graph compaction contract ─────────────────
 add_test(
     NAME test_issue314_build_graph_contract
diff --git a/tests/test_issue312_access_modes.cpp b/tests/test_issue312_access_modes.cpp
new file mode 100644
index 00000000..1f59b8e5
--- /dev/null
+++ b/tests/test_issue312_access_modes.cpp
@@ -0,0 +1,145 @@
+/**
+ * @file test_issue312_access_modes.cpp
+ * @brief Tests for explicit pptr checked/unchecked access modes.
+ */
+
+#include "pmm/persist_memory_manager.h"
+#include "pmm/pmm_presets.h"
+
+#include <catch2/catch_test_macros.hpp>
+#include <cstdint>
+#include <cstring>
+#include <string>
+
+namespace
+{
+using Mgr = pmm::PersistMemoryManager<>;
+using SmallMgr = pmm::PersistMemoryManager<pmm::SmallAddressTraits, 312>;
+} // namespace
+
+TEST_CASE( "I312: valid pptr resolves through checked and unchecked paths", "[test_issue312]" )
+{
+    REQUIRE( Mgr::create( 64 * 1024 ) );
+
+    Mgr::pptr<std::uint32_t> p = Mgr::allocate_typed<std::uint32_t>();
+    REQUIRE( !p.is_null() );
+
+    REQUIRE( Mgr::resolve_checked( p ) != nullptr );
+    REQUIRE( Mgr::resolve_unchecked( p ) != nullptr );
+    REQUIRE( Mgr::resolve( p ) == Mgr::resolve_checked( p ) );
+    REQUIRE( p.resolve() == Mgr::resolve_checked( p ) );
+    REQUIRE( p.resolve_unchecked() == Mgr::resolve_unchecked( p ) );
+
+    *p = 0x312u;
+    REQUIRE( *Mgr::resolve_checked( p ) == 0x312u );
+
+    Mgr::deallocate_typed( p );
+    Mgr::destroy();
+}
+
+TEST_CASE( "I312: SmallAddressTraits resolves to canonical non-aligned user pointer", "[test_issue312]" )
+{
+    static_assert( sizeof( pmm::Block ) % pmm::SmallAddressTraits::granule_size != 0,
+                   "SmallAddressTraits must exercise the non-aligned block-header path" );
+
+    REQUIRE( SmallMgr::create() );
+
+    SmallMgr::pptr<std::uint32_t> p = SmallMgr::allocate_typed<std::uint32_t>();
+    REQUIRE( !p.is_null() );
+
+    auto* checked = SmallMgr::resolve_checked( p );
+    auto* unchecked = SmallMgr::resolve_unchecked( p );
+    REQUIRE( checked != nullptr );
+    REQUIRE( unchecked != nullptr );
+    REQUIRE( checked == unchecked );
+    REQUIRE( reinterpret_cast<std::uintptr_t>( checked ) % alignof( std::uint32_t ) == 0 );
+    REQUIRE( reinterpret_cast<std::uintptr_t>( checked ) % pmm::SmallAddressTraits::granule_size == 0 );
+    REQUIRE( p.resolve() == checked );
+    REQUIRE( p.resolve_unchecked() == unchecked );
+
+    *checked = 0x51312u;
+    REQUIRE( *unchecked == 0x51312u );
+
+    SmallMgr::deallocate_typed( p );
+    SmallMgr::destroy();
+}
+
+TEST_CASE( "I312: SmallAddressTraits create_typed returns canonical public pptr", "[test_issue312]" )
+{
+    static_assert( sizeof( pmm::Block ) % pmm::SmallAddressTraits::granule_size != 0,
+                   "SmallAddressTraits must exercise the non-aligned block-header path" );
+
+    REQUIRE( SmallMgr::create() );
+
+    SmallMgr::pptr<std::uint32_t> p = SmallMgr::create_typed<std::uint32_t>( 0xC312u );
+    REQUIRE( !p.is_null() );
+
+    auto* checked = SmallMgr::resolve_checked( p );
+    auto* unchecked = SmallMgr::resolve_unchecked( p );
+    REQUIRE( checked != nullptr );
+    REQUIRE( unchecked != nullptr );
+    REQUIRE( checked == unchecked );
+    REQUIRE( reinterpret_cast<std::uintptr_t>( checked ) % pmm::SmallAddressTraits::granule_size == 0 );
+    REQUIRE( *checked == 0xC312u );
+
+    SmallMgr::destroy_typed( p );
+    SmallMgr::destroy();
+}
+
+TEST_CASE( "I312: SmallAddressTraits interned symbols use canonical public pptrs", "[test_issue312]" )
+{
+    static_assert( sizeof( pmm::Block ) % pmm::SmallAddressTraits::granule_size != 0,
+                   "SmallAddressTraits must exercise the non-aligned block-header path" );
+
+    REQUIRE( SmallMgr::create() );
+    REQUIRE( SmallMgr::register_domain( "app/issue312" ) );
+
+    auto domain_id = SmallMgr::find_domain_by_name( "app/issue312" );
+    REQUIRE( domain_id != 0 );
+
+    SmallMgr::pptr<pmm::pstringview> symbol = SmallMgr::pstringview( "app/issue312" );
+    REQUIRE( !symbol.is_null() );
+    REQUIRE( SmallMgr::resolve_checked( symbol ) != nullptr );
+    REQUIRE( symbol->c_str() == std::string( "app/issue312" ) );
+    REQUIRE( SmallMgr::find_domain_by_symbol( symbol ) == domain_id );
+
+    SmallMgr::destroy();
+}
+
+TEST_CASE( "I312: invalid out-of-range pptr is rejected by both access modes", "[test_issue312]" )
+{
+    REQUIRE( Mgr::create( 64 * 1024 ) );
+
+    Mgr::pptr<std::uint32_t> invalid( static_cast<Mgr::index_type>( Mgr::total_size() ) );
+
+    REQUIRE( Mgr::resolve_checked( invalid ) == nullptr );
+    REQUIRE( Mgr::resolve_unchecked( invalid ) == nullptr );
+    REQUIRE_FALSE( Mgr::is_valid_ptr( invalid ) );
+
+    Mgr::destroy();
+}
+
+TEST_CASE( "I312: stale pptr is rejected by checked access but explicit unchecked access remains raw",
+           "[test_issue312]" )
+{
+    REQUIRE( Mgr::create( 64 * 1024 ) );
+
+    Mgr::pptr<std::uint32_t> stale = Mgr::allocate_typed<std::uint32_t>();
+    REQUIRE( !stale.is_null() );
+    *stale = 0xDEAD312u;
+
+    std::uint32_t* raw_before_free = Mgr::resolve_unchecked( stale );
+    REQUIRE( raw_before_free != nullptr );
+
+    Mgr::deallocate_typed( stale );
+
+    REQUIRE( Mgr::resolve_checked( stale ) == nullptr );
+    REQUIRE( Mgr::resolve( stale ) == nullptr );
+    REQUIRE( stale.resolve() == nullptr );
+    REQUIRE_FALSE( Mgr::is_valid_ptr( stale ) );
+
+    REQUIRE( Mgr::resolve_unchecked( stale ) == raw_before_free );
+    REQUIRE( stale.resolve_unchecked() == raw_before_free );
+
+    Mgr::destroy();
+}