author     Yury Selivanov <yury@edgedb.com>    2020-04-22 17:03:48 -0700
committer  Yury Selivanov <yury@edgedb.com>    2020-04-22 17:03:48 -0700
commit     d107c3ea800d206ef890cafa81a8520aa29c5eb1 (patch)
tree       0e2a5029a5e0bf0e06f941aba9cb9b364a3e6e75
parent     ad137a3b4900a3cec1b2a46888f70612dff18370 (diff)
download   immutables-d107c3ea800d206ef890cafa81a8520aa29c5eb1.tar.gz
           immutables-d107c3ea800d206ef890cafa81a8520aa29c5eb1.zip
A bunch of fixes
* Fix #26: `ifdef NDEBUG` should be `ifndef NDEBUG`
* More tests for #24
* Add a `DEBUG_IMMUTABLES` env var to build debug builds
-rw-r--r--   immutables/_map.c     22
-rw-r--r--   setup.py              10
-rw-r--r--   tests/test_map.py    137
3 files changed, 157 insertions, 12 deletions
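
The substance of the #26 fix is the direction of the NDEBUG guard. NDEBUG is defined for release builds and turns assert() into a no-op, so helpers that exist only to back assert() checks must be compiled under `#ifndef NDEBUG`. The previous `#ifdef NDEBUG` guards did the opposite: the validation code was compiled only into release builds, where assert() does nothing, and was missing from debug builds. Below is a minimal sketch of the intended pattern; it is illustrative only, and validate_node/VALIDATE_NODE are made-up names, not the helpers in _map.c.

#include <assert.h>

/* Debug-only validation helper: it exists only to feed assert(), so it
 * is compiled when NDEBUG is NOT defined (assert() is active) and
 * dropped when NDEBUG is defined (release builds, assert() is a no-op). */
#ifndef NDEBUG
static void
validate_node(int count, int capacity)
{
    assert(count >= 0);
    assert(count <= capacity);
}
#define VALIDATE_NODE(c, cap) validate_node((c), (cap));
#else
/* Release builds: the check disappears together with assert(). */
#define VALIDATE_NODE(c, cap)
#endif

int
main(void)
{
    VALIDATE_NODE(3, 16)    /* checked in debug builds, no-op under NDEBUG */
    return 0;
}
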
diff --git a/immutables/_map.c b/immutables/_map.c
index 89687d0..19ce760 100644
--- a/immutables/_map.c
+++ b/immutables/_map.c
@@ -393,12 +393,13 @@ static MapObject *
map_update(uint64_t mutid, MapObject *o, PyObject *src);
-#ifdef NDEBUG
+#ifndef NDEBUG
static void
_map_node_array_validate(void *o)
{
assert(IS_ARRAY_NODE(o));
MapNode_Array *node = (MapNode_Array*)(o);
+ assert(node->a_count <= HAMT_ARRAY_NODE_SIZE);
Py_ssize_t i = 0, count = 0;
for (; i < HAMT_ARRAY_NODE_SIZE; i++) {
if (node->a_array[i] != NULL) {
@@ -1109,7 +1110,7 @@ map_node_bitmap_without(MapNode_Bitmap *self,
}
}
-#ifdef NDEBUG
+#ifndef NDEBUG
/* Ensure that Collision.without implementation
converts to Bitmap nodes itself.
*/
@@ -1744,6 +1745,7 @@ map_node_array_clone(MapNode_Array *node, uint64_t mutid)
Py_ssize_t i;
VALIDATE_ARRAY_NODE(node)
+ assert(node->a_count <= HAMT_ARRAY_NODE_SIZE);
/* Create a new Array node. */
clone = (MapNode_Array *)map_node_array_new(node->a_count, mutid);
@@ -1806,7 +1808,7 @@ map_node_array_assoc(MapNode_Array *self,
if (mutid != 0 && self->a_mutid == mutid) {
new_node = self;
- self->a_count++; /*must update count*/
+ self->a_count++;
Py_INCREF(self);
}
else {
@@ -2007,7 +2009,7 @@ map_node_array_without(MapNode_Array *self,
}
else {
-#ifdef NDEBUG
+#ifndef NDEBUG
if (IS_COLLISION_NODE(node)) {
assert(
(map_node_collision_count(
@@ -2102,7 +2104,9 @@ map_node_array_dump(MapNode_Array *node,
goto error;
}
- if (_map_dump_format(writer, "ArrayNode(id=%p):\n", node)) {
+ if (_map_dump_format(writer, "ArrayNode(id=%p count=%zd):\n",
+ node, node->a_count)
+ ) {
goto error;
}
@@ -2299,7 +2303,7 @@ map_iterator_bitmap_next(MapIteratorState *iter,
Py_ssize_t pos = iter->i_pos[level];
if (pos + 1 >= Py_SIZE(node)) {
-#ifdef NDEBUG
+#ifndef NDEBUG
assert(iter->i_level >= 0);
iter->i_nodes[iter->i_level] = NULL;
#endif
@@ -2336,7 +2340,7 @@ map_iterator_collision_next(MapIteratorState *iter,
Py_ssize_t pos = iter->i_pos[level];
if (pos + 1 >= Py_SIZE(node)) {
-#ifdef NDEBUG
+#ifndef NDEBUG
assert(iter->i_level >= 0);
iter->i_nodes[iter->i_level] = NULL;
#endif
@@ -2360,7 +2364,7 @@ map_iterator_array_next(MapIteratorState *iter,
Py_ssize_t pos = iter->i_pos[level];
if (pos >= HAMT_ARRAY_NODE_SIZE) {
-#ifdef NDEBUG
+#ifndef NDEBUG
assert(iter->i_level >= 0);
iter->i_nodes[iter->i_level] = NULL;
#endif
@@ -2382,7 +2386,7 @@ map_iterator_array_next(MapIteratorState *iter,
}
}
-#ifdef NDEBUG
+#ifndef NDEBUG
assert(iter->i_level >= 0);
iter->i_nodes[iter->i_level] = NULL;
#endif
diff --git a/setup.py b/setup.py
index e27586c..3749742 100644
--- a/setup.py
+++ b/setup.py
@@ -1,4 +1,4 @@
-import os.path
+import os
import platform
import setuptools
@@ -22,11 +22,17 @@ with open(os.path.join(
if platform.python_implementation() == 'CPython':
+ if os.environ.get("DEBUG_IMMUTABLES") == '1':
+ undef_macros = ["NDEBUG"]
+ else:
+ undef_macros = None
+
ext_modules = [
setuptools.Extension(
"immutables._map",
["immutables/_map.c"],
- extra_compile_args=CFLAGS)
+ extra_compile_args=CFLAGS,
+ undef_macros=undef_macros)
]
else:
ext_modules = []
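
The setup.py change above is what the new `DEBUG_IMMUTABLES` env var hooks into: when it is set to 1, NDEBUG is added to undef_macros, so the build passes -UNDEBUG and cancels the -DNDEBUG that a release CPython normally puts in extension CFLAGS, re-enabling the assert()-based checks guarded by `#ifndef NDEBUG` in _map.c. A roughly equivalent standalone sketch (illustrative, not the actual file):

import os
import setuptools

# When DEBUG_IMMUTABLES=1, undefine NDEBUG so the C extension is built
# with assert() and the debug-only validation helpers enabled.
if os.environ.get("DEBUG_IMMUTABLES") == "1":
    undef_macros = ["NDEBUG"]
else:
    undef_macros = None

ext_modules = [
    setuptools.Extension(
        "immutables._map",
        ["immutables/_map.c"],
        undef_macros=undef_macros,
    )
]

With a local checkout, something like DEBUG_IMMUTABLES=1 pip install -e . should then produce a debug build of the extension (the exact invocation is an assumption, not part of this patch).
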
diff --git a/tests/test_map.py b/tests/test_map.py
index 0b464cf..5c4e1fb 100644
--- a/tests/test_map.py
+++ b/tests/test_map.py
@@ -240,7 +240,9 @@ class BaseMapTest:
# <Key name:E hash:362244>: 'e'
# <Key name:B hash:101>: 'b'
- def test_map_stress(self):
+
+
+ def test_map_stress_01(self):
COLLECTION_SIZE = 7000
TEST_ITERS_EVERY = 647
CRASH_HASH_EVERY = 97
@@ -330,6 +332,78 @@ class BaseMapTest:
self.assertEqual(len(h), 0)
self.assertEqual(list(h.items()), [])
+ def test_map_stress_02(self):
+ COLLECTION_SIZE = 20000
+ TEST_ITERS_EVERY = 647
+ CRASH_HASH_EVERY = 97
+ DELETE_EVERY = 3
+ CRASH_EQ_EVERY = 11
+
+ h = self.Map()
+ d = dict()
+
+ for i in range(COLLECTION_SIZE // 2):
+ key = KeyStr(i)
+
+ if not (i % CRASH_HASH_EVERY):
+ with HashKeyCrasher(error_on_hash=True):
+ with self.assertRaises(HashingError):
+ h.set(key, i)
+
+ h = h.set(key, i)
+
+ if not (i % CRASH_EQ_EVERY):
+ with HashKeyCrasher(error_on_eq=True):
+ with self.assertRaises(EqError):
+ h.get(KeyStr(i)) # really trigger __eq__
+
+ d[key] = i
+ self.assertEqual(len(d), len(h))
+
+ if not (i % TEST_ITERS_EVERY):
+ self.assertEqual(set(h.items()), set(d.items()))
+ self.assertEqual(len(h.items()), len(d.items()))
+
+ with h.mutate() as m:
+ for i in range(COLLECTION_SIZE // 2, COLLECTION_SIZE):
+ key = KeyStr(i)
+
+ if not (i % CRASH_HASH_EVERY):
+ with HashKeyCrasher(error_on_hash=True):
+ with self.assertRaises(HashingError):
+ m[key] = i
+
+ m[key] = i
+
+ if not (i % CRASH_EQ_EVERY):
+ with HashKeyCrasher(error_on_eq=True):
+ with self.assertRaises(EqError):
+ m[KeyStr(i)]
+
+ d[key] = i
+ self.assertEqual(len(d), len(m))
+
+ if not (i % DELETE_EVERY):
+ del m[key]
+ del d[key]
+
+ self.assertEqual(len(d), len(m))
+
+ h = m.finish()
+
+ self.assertEqual(len(h), len(d))
+ self.assertEqual(set(h.items()), set(d.items()))
+
+ with h.mutate() as m:
+ for key in list(d):
+ del d[key]
+ del m[key]
+ self.assertEqual(len(m), len(d))
+ h = m.finish()
+
+ self.assertEqual(len(h), len(d))
+ self.assertEqual(set(h.items()), set(d.items()))
+
def test_map_delete_1(self):
A = HashKey(100, 'A')
B = HashKey(101, 'B')
@@ -1235,6 +1309,67 @@ class BaseMapTest:
m2 = m.update({'a': 20})
self.assertEqual(len(m2), 2)
+ def test_map_mut_20(self):
+ # Issue 24:
+
+ h = self.Map()
+
+ for i in range(19):
+ # Create more than 16 keys to trigger the root bitmap
+ # node to be converted into an array node
+ h = h.set(HashKey(i, i), i)
+
+
+ h = h.set(HashKey(18, '18-collision'), 18)
+
+ with h.mutate() as m:
+ del m[HashKey(18, 18)]
+ del m[HashKey(18, '18-collision')]
+
+            # The pre-issue-24 code failed to update the number of array
+            # node elements, so at this point the recorded count would be
+            # greater than the actual number of elements.
+ h = m.finish()
+
+        # None of the operations below should crash the debug build.
+ with h.mutate() as m:
+ for i in range(18):
+ del m[HashKey(i, i)]
+ h = m.finish()
+ h = h.set(HashKey(21, 21), 21)
+ h = h.set(HashKey(22, 22), 22)
+
+ def test_map_mut_21(self):
+ # Issue 24:
+        # Array nodes, while in mutation, failed to increment their
+        # internal element count when a new key was added, leaving
+        # the count smaller than the actual number of elements.
+
+ h = self.Map()
+
+ for i in range(18):
+ # Create more than 16 keys to trigger the root bitmap
+ # node to be converted into an array node
+ h = h.set(HashKey(i, i), i)
+
+ with h.mutate() as m:
+ # Add one new key to the array node
+ m[HashKey(18, 18)] = 18
+ # Add another key -- after this the old code failed
+ # to increment the number of elements in the mutated
+ # array node.
+ m[HashKey(19, 19)] = 19
+ h = m.finish()
+
+ for i in range(20):
+            # Start deleting keys one by one. Because the array node
+            # element count was tracked incorrectly (smaller by 1 than
+            # it actually is), the mutation for "del h[18]" would
+            # create an empty array node, clipping the "19" key.
+ # Before the issue #24 fix, the below line would crash
+ # on i=19.
+ h = h.delete(HashKey(i, i))
+
def test_map_mut_stress(self):
COLLECTION_SIZE = 7000
TEST_ITERS_EVERY = 647