From ed43ab72b225ce0746c7eb8f704ea41891a3b4e0 Mon Sep 17 00:00:00 2001 From: invertego Date: Sun, 9 Oct 2022 14:42:48 -0700 Subject: [PATCH] nall: fix various hashset issues - Fix copy assignment operator - Prevent reserve() from reallocating/rehashing unless capacity is increasing - Prevent insert() from inserting duplicate entries - Disable remove() as it needs some work and is currently unused --- nall/hashset.hpp | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/nall/hashset.hpp b/nall/hashset.hpp index 202eda57c..632b3e288 100644 --- a/nall/hashset.hpp +++ b/nall/hashset.hpp @@ -24,8 +24,8 @@ struct hashset { if(this == &source) return *this; reset(); if(source.pool) { - for(u32 n : range(source.count)) { - insert(*source.pool[n]); + for(u32 n : range(source.length)) { + if(source.pool[n]) insert(*source.pool[n]); } } return *this; } @@ -63,6 +63,8 @@ struct hashset { } auto reserve(u32 size) -> void { + if(length >= size) return; + //ensure all items will fit into pool (with <= 50% load) and amortize growth size = bit::round(max(size, count << 1)); T** copy = new T*[size](); @@ -100,15 +102,20 @@ struct hashset { //double pool size when load is >= 50% if(count >= (length >> 1)) reserve(length << 1); - count++; u32 hash = value.hash() & (length - 1); - while(pool[hash]) if(++hash >= length) hash = 0; + while(pool[hash]) { + if(value == *pool[hash]) return nothing; + if(++hash >= length) hash = 0; + } + count++; pool[hash] = new T(value); return *pool[hash]; } +#if 0 + //does not work! todo: implement tombstones or rehashing to fill gaps. auto remove(const T& value) -> bool { if(!pool) return false; @@ -125,6 +132,7 @@ struct hashset { return false; } +#endif protected: T** pool = nullptr;