aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--LICENSE7
-rw-r--r--LICENSE-APACHE200
-rw-r--r--MANIFEST.in4
-rw-r--r--Makefile33
-rw-r--r--NOTICE1
-rw-r--r--README.rst123
-rw-r--r--bench.pngbin0 -> 36273 bytes
-rw-r--r--immutables/__init__.py25
-rw-r--r--immutables/_map.c4175
-rw-r--r--immutables/_map.h107
-rw-r--r--immutables/_map.pyi73
-rw-r--r--immutables/_protocols.py85
-rw-r--r--immutables/_testutils.py80
-rw-r--r--immutables/_version.py13
-rw-r--r--immutables/map.py855
-rw-r--r--immutables/py.typed1
-rw-r--r--immutables/pythoncapi_compat.h446
-rw-r--r--pyproject.toml25
-rw-r--r--setup.py93
-rw-r--r--tests/__init__.py16
-rw-r--r--tests/conftest.py13
-rw-r--r--tests/test-data/check-immu.test73
-rw-r--r--tests/test_issue24.py156
-rw-r--r--tests/test_map.py1417
-rw-r--r--tests/test_mypy.py26
-rw-r--r--tests/test_none_keys.py515
26 files changed, 8562 insertions, 0 deletions
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..15df07f
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,7 @@
+The Immutables project is provided under the Apache 2.0 license. See
+LICENSE-APACHE for the full text of the license.
+
+Additionally, this software contains the following code distributed under a
+different license (refer to the specific files for details):
+
+ immutables/pythoncapi_compat.h (MIT License)
diff --git a/LICENSE-APACHE b/LICENSE-APACHE
new file mode 100644
index 0000000..bf911cf
--- /dev/null
+++ b/LICENSE-APACHE
@@ -0,0 +1,200 @@
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..d3f372b
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,4 @@
+recursive-include tests *.py
+recursive-include immutables *.py *.c *.h *.pyi
+include LICENSE* NOTICE README.rst bench.png
+include immutables/py.typed
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..481d190
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,33 @@
+.PHONY: rtest build test clean all
+
+
+PYTHON ?= python
+ROOT = $(dir $(realpath $(firstword $(MAKEFILE_LIST))))
+
+
+all: build
+
+build:
+ $(PYTHON) setup.py build_ext --inplace
+
+debug:
+ DEBUG_IMMUTABLES=1 $(PYTHON) setup.py build_ext --inplace
+
+test:
+ $(PYTHON) -m pytest -v
+
+rtest:
+ ~/dev/venvs/36-debug/bin/python setup.py build_ext --inplace
+
+ env PYTHONPATH=. \
+ ~/dev/venvs/36-debug/bin/python -m test.regrtest -R3:3 --testdir tests/
+
+clean:
+ find . -name '*.pyc' | xargs rm -f
+ find . -name '*.so' | xargs rm -f
+ rm -rf ./build
+ rm -rf ./dist
+ rm -rf ./*.egg-info
+
+testinstalled:
+ cd /tmp && $(PYTHON) $(ROOT)/tests/__init__.py
diff --git a/NOTICE b/NOTICE
new file mode 100644
index 0000000..8a796ab
--- /dev/null
+++ b/NOTICE
@@ -0,0 +1 @@
+Copyright 2018-present Contributors to the immutables project.
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..e14d321
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,123 @@
+immutables
+==========
+
+.. image:: https://github.com/MagicStack/immutables/workflows/Tests/badge.svg?branch=master
+ :target: https://github.com/MagicStack/immutables/actions?query=workflow%3ATests+branch%3Amaster+event%3Apush
+
+.. image:: https://img.shields.io/pypi/v/immutables.svg
+ :target: https://pypi.python.org/pypi/immutables
+
+An immutable mapping type for Python.
+
+The underlying datastructure is a Hash Array Mapped Trie (HAMT)
+used in Clojure, Scala, Haskell, and other functional languages.
+This implementation is used in CPython 3.7 in the ``contextvars``
+module (see `PEP 550 <https://www.python.org/dev/peps/pep-0550/>`_ and
+`PEP 567 <https://www.python.org/dev/peps/pep-0567/>`_ for more details).
+
+Immutable mappings based on HAMT have O(log N) performance for both
+``set()`` and ``get()`` operations, which is essentially O(1) for
+relatively small mappings.
+
+Below is a visualization of a simple get/set benchmark comparing
+HAMT to an immutable mapping implemented with a Python dict
+copy-on-write approach (the benchmark code is available
+`here <https://gist.github.com/1st1/292e3f0bbe43bd65ff3256f80aa2637d>`_):
+
+.. image:: bench.png
+
+
+Installation
+------------
+
+``immutables`` requires Python 3.6+ and is available on PyPI::
+
+ $ pip install immutables
+
+
+API
+---
+
+``immutables.Map`` is an unordered immutable mapping. ``Map`` objects
+are hashable, comparable, and pickleable.
+
+The ``Map`` object implements the ``collections.abc.Mapping`` ABC
+so working with it is very similar to working with Python dicts:
+
+.. code-block:: python
+
+ import immutables
+
+ map = immutables.Map(a=1, b=2)
+
+ print(map['a'])
+ # will print '1'
+
+ print(map.get('z', 100))
+ # will print '100'
+
+ print('z' in map)
+ # will print 'False'
+
+Since Maps are immutable, there is a special API for mutations that
+allows applying changes to the Map object and creating new (derived) Maps:
+
+.. code-block:: python
+
+ map2 = map.set('a', 10)
+ print(map, map2)
+ # will print:
+ # <immutables.Map({'a': 1, 'b': 2})>
+ # <immutables.Map({'a': 10, 'b': 2})>
+
+ map3 = map2.delete('b')
+ print(map, map2, map3)
+ # will print:
+ # <immutables.Map({'a': 1, 'b': 2})>
+ # <immutables.Map({'a': 10, 'b': 2})>
+ # <immutables.Map({'a': 10})>
+
+Maps also implement APIs for bulk updates: ``MapMutation`` objects:
+
+.. code-block:: python
+
+ map_mutation = map.mutate()
+ map_mutation['a'] = 100
+ del map_mutation['b']
+ map_mutation.set('y', 'y')
+
+ map2 = map_mutation.finish()
+
+ print(map, map2)
+ # will print:
+ # <immutables.Map({'a': 1, 'b': 2})>
+ # <immutables.Map({'a': 100, 'y': 'y'})>
+
+``MapMutation`` objects are context managers. Here's the above example
+rewritten in a more idiomatic way:
+
+.. code-block:: python
+
+ with map.mutate() as mm:
+ mm['a'] = 100
+ del mm['b']
+ mm.set('y', 'y')
+ map2 = mm.finish()
+
+ print(map, map2)
+ # will print:
+ # <immutables.Map({'a': 1, 'b': 2})>
+ # <immutables.Map({'a': 100, 'y': 'y'})>
+
+
+Further development
+-------------------
+
+* An immutable version of Python ``set`` type with efficient
+ ``add()`` and ``discard()`` operations.
+
+
+License
+-------
+
+Apache 2.0
diff --git a/bench.png b/bench.png
new file mode 100644
index 0000000..e77ff5d
--- /dev/null
+++ b/bench.png
Binary files differ
diff --git a/immutables/__init__.py b/immutables/__init__.py
new file mode 100644
index 0000000..b8565b0
--- /dev/null
+++ b/immutables/__init__.py
@@ -0,0 +1,25 @@
+# flake8: noqa
+
+import sys
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._map import Map
+else:
+ try:
+ from ._map import Map
+ except ImportError:
+ from .map import Map
+ else:
+ import collections.abc as _abc
+ _abc.Mapping.register(Map)
+
+from ._protocols import MapKeys as MapKeys
+from ._protocols import MapValues as MapValues
+from ._protocols import MapItems as MapItems
+from ._protocols import MapMutation as MapMutation
+
+from ._version import __version__
+
+__all__ = 'Map',
diff --git a/immutables/_map.c b/immutables/_map.c
new file mode 100644
index 0000000..7e510fd
--- /dev/null
+++ b/immutables/_map.c
@@ -0,0 +1,4175 @@
+#include <stddef.h> /* For offsetof */
+#include "pythoncapi_compat.h"
+#include "_map.h"
+
+
+/*
+This file provides an implementation of an immutable mapping using the
+Hash Array Mapped Trie (or HAMT) datastructure.
+
+This design allows us to have:
+
+1. Efficient copy: immutable mappings can be copied by reference,
+ making it an O(1) operation.
+
+2. Efficient mutations: due to structural sharing, only a portion of
+ the trie needs to be copied when the collection is mutated. The
+ cost of set/delete operations is O(log N).
+
+3. Efficient lookups: O(log N).
+
+(where N is number of key/value items in the immutable mapping.)
+
+
+HAMT
+====
+
+The core idea of HAMT is that the shape of the trie is encoded into the
+hashes of keys.
+
+Say we want to store a K/V pair in our mapping. First, we calculate the
+hash of K, let's say it's 19830128, or in binary:
+
+ 0b1001011101001010101110000 = 19830128
+
+Now let's partition this bit representation of the hash into blocks of
+5 bits each:
+
+ 0b00_00000_10010_11101_00101_01011_10000 = 19830128
+ (6) (5) (4) (3) (2) (1)
+
+Each block of 5 bits represents a number between 0 and 31. So if we have
+a tree that consists of nodes, each of which is an array of 32 pointers,
+those 5-bit blocks will encode a position on a single tree level.
+
+For example, storing the key K with hash 19830128, results in the following
+tree structure:
+
+ (array of 32 pointers)
+ +---+ -- +----+----+----+ -- +----+
+ root node | 0 | .. | 15 | 16 | 17 | .. | 31 | 0b10000 = 16 (1)
+ (level 1) +---+ -- +----+----+----+ -- +----+
+ |
+ +---+ -- +----+----+----+ -- +----+
+ a 2nd level node | 0 | .. | 10 | 11 | 12 | .. | 31 | 0b01011 = 11 (2)
+ +---+ -- +----+----+----+ -- +----+
+ |
+ +---+ -- +----+----+----+ -- +----+
+ a 3rd level node | 0 | .. | 04 | 05 | 06 | .. | 31 | 0b00101 = 5 (3)
+ +---+ -- +----+----+----+ -- +----+
+ |
+ +---+ -- +----+----+----+----+
+ a 4th level node | 0 | .. | 04 | 29 | 30 | 31 | 0b11101 = 29 (4)
+ +---+ -- +----+----+----+----+
+ |
+ +---+ -- +----+----+----+ -- +----+
+ a 5th level node | 0 | .. | 17 | 18 | 19 | .. | 31 | 0b10010 = 18 (5)
+ +---+ -- +----+----+----+ -- +----+
+ |
+ +--------------+
+ |
+ +---+ -- +----+----+----+ -- +----+
+ a 6th level node | 0 | .. | 15 | 16 | 17 | .. | 31 | 0b00000 = 0 (6)
+ +---+ -- +----+----+----+ -- +----+
+ |
+ V -- our value (or collision)
+
+To rehash: for a K/V pair, the hash of K encodes where in the tree V will
+be stored.
+
+To optimize memory footprint and handle hash collisions, our implementation
+uses three different types of nodes:
+
+ * A Bitmap node;
+ * An Array node;
+ * A Collision node.
+
+Because we implement an immutable dictionary, our nodes are also
+immutable. Therefore, when we need to modify a node, we copy it, and
+do that modification to the copy.
+
+
+Array Nodes
+-----------
+
+These nodes are very simple. Essentially they are arrays of 32 pointers
+we used to illustrate the high-level idea in the previous section.
+
+We use Array nodes only when we need to store more than 16 pointers
+in a single node.
+
+Array nodes do not store key objects or value objects. They are used
+only as an indirection level - their pointers point to other nodes in
+the tree.
+
+
+Bitmap Node
+-----------
+
+Allocating a new 32-pointers array for every node of our tree would be
+very expensive. Unless we store millions of keys, most of tree nodes would
+be very sparse.
+
+When we have less than 16 elements in a node, we don't want to use the
+Array node, that would mean that we waste a lot of memory. Instead,
+we can use bitmap compression and can have just as many pointers
+as we need!
+
+Bitmap nodes consist of two fields:
+
+1. An array of pointers. If a Bitmap node holds N elements, the
+ array will be of N pointers.
+
+2. A 32bit integer -- a bitmap field. If an N-th bit is set in the
+ bitmap, it means that the node has an N-th element.
+
+For example, say we need to store a 3 elements sparse array:
+
+ +---+ -- +---+ -- +----+ -- +----+
+ | 0 | .. | 4 | .. | 11 | .. | 17 |
+ +---+ -- +---+ -- +----+ -- +----+
+ | | |
+ o1 o2 o3
+
+We allocate a three-pointer Bitmap node. Its bitmap field will be
+then set to:
+
+ 0b_00100_00010_00000_10000 == (1 << 17) | (1 << 11) | (1 << 4)
+
+To check if our Bitmap node has an I-th element we can do:
+
+ bitmap & (1 << I)
+
+
+And here's a formula to calculate a position in our pointer array
+which would correspond to an I-th element:
+
+ popcount(bitmap & ((1 << I) - 1))
+
+
+Let's break it down:
+
+ * `popcount` is a function that returns a number of bits set to 1;
+
+ * `((1 << I) - 1)` is a mask to filter the bitmask to contain bits
+ set to the *right* of our bit.
+
+
+So for our 17, 11, and 4 indexes:
+
+ * bitmap & ((1 << 17) - 1) == 0b100000010000 => 2 bits are set => index is 2.
+
+ * bitmap & ((1 << 11) - 1) == 0b10000 => 1 bit is set => index is 1.
+
+ * bitmap & ((1 << 4) - 1) == 0b0 => 0 bits are set => index is 0.
+
+
+To conclude: Bitmap nodes are just like Array nodes -- they can store
+a number of pointers, but use bitmap compression to eliminate unused
+pointers.
+
+
+Bitmap nodes have two pointers for each item:
+
+ +----+----+----+----+ -- +----+----+
+ | k1 | v1 | k2 | v2 | .. | kN | vN |
+ +----+----+----+----+ -- +----+----+
+
+When kI == NULL, vI points to another tree level.
+
+When kI != NULL, the actual key object is stored in kI, and its
+value is stored in vI.
+
+
+Collision Nodes
+---------------
+
+Collision nodes are simple arrays of pointers -- two pointers per
+key/value. When there's a hash collision, say for k1/v1 and k2/v2
+we have `hash(k1)==hash(k2)`. Then our collision node will be:
+
+ +----+----+----+----+
+ | k1 | v1 | k2 | v2 |
+ +----+----+----+----+
+
+
+Tree Structure
+--------------
+
+All nodes are PyObjects.
+
+The `MapObject` object has a pointer to the root node (h_root),
+and has a length field (h_count).
+
+High-level functions accept a MapObject object and dispatch to
+lower-level functions depending on what kind of node h_root points to.
+
+
+Operations
+==========
+
+There are three fundamental operations on an immutable dictionary:
+
+1. "o.assoc(k, v)" will return a new immutable dictionary, that will be
+ a copy of "o", but with the "k/v" item set.
+
+ Functions in this file:
+
+ map_node_assoc, map_node_bitmap_assoc,
+ map_node_array_assoc, map_node_collision_assoc
+
+ `map_node_assoc` function accepts a node object, and calls
+ other functions depending on its actual type.
+
+2. "o.find(k)" will lookup key "k" in "o".
+
+ Functions:
+
+ map_node_find, map_node_bitmap_find,
+ map_node_array_find, map_node_collision_find
+
+3. "o.without(k)" will return a new immutable dictionary, that will be
+   a copy of "o", but without the "k" key.
+
+ Functions:
+
+ map_node_without, map_node_bitmap_without,
+ map_node_array_without, map_node_collision_without
+
+
+Further Reading
+===============
+
+1. http://blog.higher-order.net/2009/09/08/understanding-clojures-persistenthashmap-deftwice.html
+
+2. http://blog.higher-order.net/2010/08/16/assoc-and-clojures-persistenthashmap-part-ii.html
+
+3. Clojure's PersistentHashMap implementation:
+ https://github.com/clojure/clojure/blob/master/src/jvm/clojure/lang/PersistentHashMap.java
+*/
+
+
+#define IS_ARRAY_NODE(node) (Py_TYPE(node) == &_Map_ArrayNode_Type)
+#define IS_BITMAP_NODE(node) (Py_TYPE(node) == &_Map_BitmapNode_Type)
+#define IS_COLLISION_NODE(node) (Py_TYPE(node) == &_Map_CollisionNode_Type)
+
+
+/* Return type for 'find' (lookup a key) functions.
+
+ * F_ERROR - an error occurred;
+ * F_NOT_FOUND - the key was not found;
+ * F_FOUND - the key was found.
+*/
+typedef enum {F_ERROR, F_NOT_FOUND, F_FOUND} map_find_t;
+
+
+/* Return type for 'without' (delete a key) functions.
+
+ * W_ERROR - an error occurred;
+ * W_NOT_FOUND - the key was not found: there's nothing to delete;
+ * W_EMPTY - the key was found: the node/tree would be empty
+ if the key is deleted;
+ * W_NEWNODE - the key was found: a new node/tree is returned
+ without that key.
+*/
+typedef enum {W_ERROR, W_NOT_FOUND, W_EMPTY, W_NEWNODE} map_without_t;
+
+
+/* Low-level iterator protocol type.
+
+ * I_ITEM - a new item has been yielded;
+ * I_END - the whole tree was visited (similar to StopIteration).
+*/
+typedef enum {I_ITEM, I_END} map_iter_t;
+
+
+#define HAMT_ARRAY_NODE_SIZE 32
+
+
+typedef struct {
+ PyObject_HEAD
+ MapNode *a_array[HAMT_ARRAY_NODE_SIZE];
+ Py_ssize_t a_count;
+ uint64_t a_mutid;
+} MapNode_Array;
+
+
+typedef struct {
+ PyObject_VAR_HEAD
+ uint64_t b_mutid;
+ uint32_t b_bitmap;
+ PyObject *b_array[1];
+} MapNode_Bitmap;
+
+
+typedef struct {
+ PyObject_VAR_HEAD
+ uint64_t c_mutid;
+ int32_t c_hash;
+ PyObject *c_array[1];
+} MapNode_Collision;
+
+
+static volatile uint64_t mutid_counter = 1;
+
+static MapNode_Bitmap *_empty_bitmap_node;
+
+
+/* Create a new HAMT immutable mapping. */
+static MapObject *
+map_new(void);
+
+/* Return a new collection based on "o", but with an additional
+ key/val pair. */
+static MapObject *
+map_assoc(MapObject *o, PyObject *key, PyObject *val);
+
+/* Return a new collection based on "o", but without "key". */
+static MapObject *
+map_without(MapObject *o, PyObject *key);
+
+/* Check if "v" is equal to "w".
+
+ Return:
+ - 0: v != w
+ - 1: v == w
+ - -1: An error occurred.
+*/
+static int
+map_eq(BaseMapObject *v, BaseMapObject *w);
+
+static map_find_t
+map_find(BaseMapObject *o, PyObject *key, PyObject **val);
+
+/* Return the size of "o"; equivalent of "len(o)". */
+static Py_ssize_t
+map_len(BaseMapObject *o);
+
+
+static MapObject *
+map_alloc(void);
+
+static MapNode *
+map_node_assoc(MapNode *node,
+ uint32_t shift, int32_t hash,
+ PyObject *key, PyObject *val, int* added_leaf,
+ uint64_t mutid);
+
+static map_without_t
+map_node_without(MapNode *node,
+ uint32_t shift, int32_t hash,
+ PyObject *key,
+ MapNode **new_node,
+ uint64_t mutid);
+
+static map_find_t
+map_node_find(MapNode *node,
+ uint32_t shift, int32_t hash,
+ PyObject *key, PyObject **val);
+
+static int
+map_node_dump(MapNode *node,
+ _PyUnicodeWriter *writer, int level);
+
+static MapNode *
+map_node_array_new(Py_ssize_t, uint64_t mutid);
+
+static MapNode *
+map_node_collision_new(int32_t hash, Py_ssize_t size, uint64_t mutid);
+
+static inline Py_ssize_t
+map_node_collision_count(MapNode_Collision *node);
+
+static int
+map_node_update(uint64_t mutid,
+ PyObject *seq,
+ MapNode *root, Py_ssize_t count,
+ MapNode **new_root, Py_ssize_t *new_count);
+
+
+static int
+map_update_inplace(uint64_t mutid, BaseMapObject *o, PyObject *src);
+
+static MapObject *
+map_update(uint64_t mutid, MapObject *o, PyObject *src);
+
+
+#if !defined(NDEBUG)
+static void
+_map_node_array_validate(void *o)
+{
+ assert(IS_ARRAY_NODE(o));
+ MapNode_Array *node = (MapNode_Array*)(o);
+ assert(node->a_count <= HAMT_ARRAY_NODE_SIZE);
+ Py_ssize_t i = 0, count = 0;
+ for (; i < HAMT_ARRAY_NODE_SIZE; i++) {
+ if (node->a_array[i] != NULL) {
+ count++;
+ }
+ }
+ assert(count == node->a_count);
+}
+
+#define VALIDATE_ARRAY_NODE(NODE) \
+ do { _map_node_array_validate(NODE); } while (0);
+#else
+#define VALIDATE_ARRAY_NODE(NODE)
+#endif
+
+
+/* Returns -1 on error */
+static inline int32_t
+map_hash(PyObject *o)
+{
+ Py_hash_t hash = PyObject_Hash(o);
+
+#if SIZEOF_PY_HASH_T <= 4
+ return hash;
+#else
+ if (hash == -1) {
+ /* exception */
+ return -1;
+ }
+
+ /* While it's suboptimal to reduce Python's 64 bit hash to
+ 32 bits via XOR, it seems that the resulting hash function
+ is good enough (this is also how Long type is hashed in Java.)
+ Storing 10, 100, 1000 Python strings results in a relatively
+ shallow and uniform tree structure.
+
+ Please don't change this hashing algorithm, as there are many
+ tests that test some exact tree shape to cover all code paths.
+ */
+ int32_t xored = (int32_t)(hash & 0xffffffffl) ^ (int32_t)(hash >> 32);
+ return xored == -1 ? -2 : xored;
+#endif
+}
+
+static inline uint32_t
+map_mask(int32_t hash, uint32_t shift)
+{
+ return (((uint32_t)hash >> shift) & 0x01f);
+}
+
+static inline uint32_t
+map_bitpos(int32_t hash, uint32_t shift)
+{
+ return (uint32_t)1 << map_mask(hash, shift);
+}
+
+static inline uint32_t
+map_bitcount(uint32_t i)
+{
+ /* We could use native popcount instruction but that would
+ require to either add configure flags to enable SSE4.2
+ support or to detect it dynamically. Otherwise, we have
+ a risk of CPython not working properly on older hardware.
+
+ In practice, there's no observable difference in
+ performance between using a popcount instruction or the
+ following fallback code.
+
+ The algorithm is copied from:
+ https://graphics.stanford.edu/~seander/bithacks.html
+ */
+ i = i - ((i >> 1) & 0x55555555);
+ i = (i & 0x33333333) + ((i >> 2) & 0x33333333);
+ return (((i + (i >> 4)) & 0xF0F0F0F) * 0x1010101) >> 24;
+}
+
+static inline uint32_t
+map_bitindex(uint32_t bitmap, uint32_t bit)
+{
+ return map_bitcount(bitmap & (bit - 1));
+}
+
+
+/////////////////////////////////// Dump Helpers
+
+static int
+_map_dump_ident(_PyUnicodeWriter *writer, int level)
+{
+ /* Write `' ' * level` to the `writer` */
+ PyObject *str = NULL;
+ PyObject *num = NULL;
+ PyObject *res = NULL;
+ int ret = -1;
+
+ str = PyUnicode_FromString(" ");
+ if (str == NULL) {
+ goto error;
+ }
+
+ num = PyLong_FromLong((long)level);
+ if (num == NULL) {
+ goto error;
+ }
+
+ res = PyNumber_Multiply(str, num);
+ if (res == NULL) {
+ goto error;
+ }
+
+ ret = _PyUnicodeWriter_WriteStr(writer, res);
+
+error:
+ Py_XDECREF(res);
+ Py_XDECREF(str);
+ Py_XDECREF(num);
+ return ret;
+}
+
+static int
+_map_dump_format(_PyUnicodeWriter *writer, const char *format, ...)
+{
+ /* A convenient helper combining _PyUnicodeWriter_WriteStr and
+ PyUnicode_FromFormatV.
+ */
+ PyObject* msg;
+ int ret;
+
+ va_list vargs;
+#ifdef HAVE_STDARG_PROTOTYPES
+ va_start(vargs, format);
+#else
+ va_start(vargs);
+#endif
+ msg = PyUnicode_FromFormatV(format, vargs);
+ va_end(vargs);
+
+ if (msg == NULL) {
+ return -1;
+ }
+
+ ret = _PyUnicodeWriter_WriteStr(writer, msg);
+ Py_DECREF(msg);
+ return ret;
+}
+
+/////////////////////////////////// Bitmap Node
+
+
+static MapNode *
+map_node_bitmap_new(Py_ssize_t size, uint64_t mutid)
+{
+    /* Create a new bitmap node of size 'size'.
+
+       `size` is the length of the key/value slot array, so it is
+       always even.  When `mutid == 0` (immutable use) the empty node
+       is shared and cached in `_empty_bitmap_node`. */
+
+    MapNode_Bitmap *node;
+    Py_ssize_t i;
+
+    assert(size >= 0);
+    assert(size % 2 == 0);
+
+    /* Fast path: reuse the cached shared empty node. */
+    if (size == 0 && _empty_bitmap_node != NULL && mutid == 0) {
+        Py_INCREF(_empty_bitmap_node);
+        return (MapNode *)_empty_bitmap_node;
+    }
+
+    /* No freelist; allocate a new bitmap node */
+    node = PyObject_GC_NewVar(
+        MapNode_Bitmap, &_Map_BitmapNode_Type, size);
+    if (node == NULL) {
+        return NULL;
+    }
+
+    Py_SET_SIZE(node, size);
+
+    for (i = 0; i < size; i++) {
+        node->b_array[i] = NULL;
+    }
+
+    node->b_bitmap = 0;
+    node->b_mutid = mutid;
+
+    PyObject_GC_Track(node);
+
+    if (size == 0 && _empty_bitmap_node == NULL && mutid == 0) {
+        /* Since bitmap nodes are immutable, we can cache the instance
+           for size=0 and reuse it whenever we need an empty bitmap node.
+        */
+        _empty_bitmap_node = node;
+        Py_INCREF(_empty_bitmap_node);
+    }
+
+    return (MapNode *)node;
+}
+
+static inline Py_ssize_t
+map_node_bitmap_count(MapNode_Bitmap *node)
+{
+    /* Number of key/value entries: slots are stored in pairs. */
+    return Py_SIZE(node) / 2;
+}
+
+static MapNode_Bitmap *
+map_node_bitmap_clone(MapNode_Bitmap *node, uint64_t mutid)
+{
+    /* Clone a bitmap node; return a new one with the same child nodes
+       (each child reference counted) and the same bitmap. */
+
+    MapNode_Bitmap *clone;
+    Py_ssize_t i;
+
+    clone = (MapNode_Bitmap *)map_node_bitmap_new(
+        Py_SIZE(node), mutid);
+    if (clone == NULL) {
+        return NULL;
+    }
+
+    /* XINCREF: key slots may legitimately be NULL (sub-node entries). */
+    for (i = 0; i < Py_SIZE(node); i++) {
+        Py_XINCREF(node->b_array[i]);
+        clone->b_array[i] = node->b_array[i];
+    }
+
+    clone->b_bitmap = node->b_bitmap;
+    return clone;
+}
+
+static MapNode_Bitmap *
+map_node_bitmap_clone_without(MapNode_Bitmap *o, uint32_t bit, uint64_t mutid)
+{
+    /* Clone `o` into a node two slots smaller, dropping the key/value
+       pair addressed by `bit` and clearing `bit` from the bitmap. */
+
+    assert(bit & o->b_bitmap);
+    assert(map_node_bitmap_count(o) > 1);
+
+    MapNode_Bitmap *new = (MapNode_Bitmap *)map_node_bitmap_new(
+        Py_SIZE(o) - 2, mutid);
+    if (new == NULL) {
+        return NULL;
+    }
+
+    uint32_t idx = map_bitindex(o->b_bitmap, bit);
+    uint32_t key_idx = 2 * idx;
+    uint32_t val_idx = key_idx + 1;
+    uint32_t i;
+
+    /* Copy the slots before the removed pair... */
+    for (i = 0; i < key_idx; i++) {
+        Py_XINCREF(o->b_array[i]);
+        new->b_array[i] = o->b_array[i];
+    }
+
+    /* ...and the slots after it, shifted down by one pair. */
+    assert(Py_SIZE(o) >= 0 && Py_SIZE(o) <= 32);
+    for (i = val_idx + 1; i < (uint32_t)Py_SIZE(o); i++) {
+        Py_XINCREF(o->b_array[i]);
+        new->b_array[i - 2] = o->b_array[i];
+    }
+
+    new->b_bitmap = o->b_bitmap & ~bit;
+    return new;
+}
+
+static MapNode *
+map_node_new_bitmap_or_collision(uint32_t shift,
+                                 PyObject *key1, PyObject *val1,
+                                 int32_t key2_hash,
+                                 PyObject *key2, PyObject *val2,
+                                 uint64_t mutid)
+{
+    /* Helper method. Creates a new node for key1/val1 and key2/val2
+       pairs.
+
+       If key1 hash is equal to the hash of key2, a Collision node
+       will be created. If they are not equal, a Bitmap node is
+       created.
+    */
+
+    int32_t key1_hash = map_hash(key1);
+    if (key1_hash == -1) {
+        return NULL;
+    }
+
+    if (key1_hash == key2_hash) {
+        MapNode_Collision *n;
+        n = (MapNode_Collision *)map_node_collision_new(key1_hash, 4, mutid);
+        if (n == NULL) {
+            return NULL;
+        }
+
+        Py_INCREF(key1);
+        n->c_array[0] = key1;
+        Py_INCREF(val1);
+        n->c_array[1] = val1;
+
+        Py_INCREF(key2);
+        n->c_array[2] = key2;
+        Py_INCREF(val2);
+        n->c_array[3] = val2;
+
+        return (MapNode *)n;
+    }
+    else {
+        /* Different hashes: build a Bitmap node by inserting both
+           pairs into a fresh empty node. */
+        int added_leaf = 0;
+        MapNode *n = map_node_bitmap_new(0, mutid);
+        if (n == NULL) {
+            return NULL;
+        }
+
+        MapNode *n2 = map_node_assoc(
+            n, shift, key1_hash, key1, val1, &added_leaf, mutid);
+        Py_DECREF(n);
+        if (n2 == NULL) {
+            return NULL;
+        }
+
+        n = map_node_assoc(
+            n2, shift, key2_hash, key2, val2, &added_leaf, mutid);
+        Py_DECREF(n2);
+        if (n == NULL) {
+            return NULL;
+        }
+
+        return n;
+    }
+}
+
+static MapNode *
+map_node_bitmap_assoc(MapNode_Bitmap *self,
+                      uint32_t shift, int32_t hash,
+                      PyObject *key, PyObject *val, int* added_leaf,
+                      uint64_t mutid)
+{
+    /* assoc operation for bitmap nodes.
+
+       Return: a new node, or self if key/val already is in the
+       collection.
+
+       'added_leaf' is later used in 'map_assoc' to determine if
+       `map.set(key, val)` increased the size of the collection.
+
+       When `mutid` is non-zero and matches `self->b_mutid` the node
+       is mutated in place (MapMutation fast path); otherwise a new
+       node is returned and `self` is left untouched.
+    */
+
+    uint32_t bit = map_bitpos(hash, shift);
+    uint32_t idx = map_bitindex(self->b_bitmap, bit);
+
+    /* Bitmap node layout:
+
+    +------+------+------+------+  ---  +------+------+
+    | key1 | val1 | key2 | val2 |  ...  | keyN | valN |
+    +------+------+------+------+  ---  +------+------+
+    where `N < Py_SIZE(node)`.
+
+    The `node->b_bitmap` field is a bitmap.  For a given
+    `(shift, hash)` pair we can determine:
+
+     - If this node has the corresponding key/val slots.
+     - The index of key/val slots.
+    */
+
+    if (self->b_bitmap & bit) {
+        /* The key is set in this node */
+
+        uint32_t key_idx = 2 * idx;
+        uint32_t val_idx = key_idx + 1;
+
+        assert(val_idx < (size_t)Py_SIZE(self));
+
+        PyObject *key_or_null = self->b_array[key_idx];
+        PyObject *val_or_node = self->b_array[val_idx];
+
+        if (key_or_null == NULL) {
+            /* key is NULL.  This means that we have a few keys
+               that have the same (hash, shift) pair. */
+
+            assert(val_or_node != NULL);
+
+            MapNode *sub_node = map_node_assoc(
+                (MapNode *)val_or_node,
+                shift + 5, hash, key, val, added_leaf,
+                mutid);
+            if (sub_node == NULL) {
+                return NULL;
+            }
+
+            if (val_or_node == (PyObject *)sub_node) {
+                Py_DECREF(sub_node);
+                Py_INCREF(self);
+                return (MapNode *)self;
+            }
+
+            if (mutid != 0 && self->b_mutid == mutid) {
+                Py_SETREF(self->b_array[val_idx], (PyObject*)sub_node);
+                Py_INCREF(self);
+                return (MapNode *)self;
+            }
+            else {
+                MapNode_Bitmap *ret = map_node_bitmap_clone(self, mutid);
+                if (ret == NULL) {
+                    return NULL;
+                }
+                Py_SETREF(ret->b_array[val_idx], (PyObject*)sub_node);
+                return (MapNode *)ret;
+            }
+        }
+
+        assert(key != NULL);
+        /* key is not NULL.  This means that we have only one other
+           key in this collection that matches our hash for this shift. */
+
+        int comp_err = PyObject_RichCompareBool(key, key_or_null, Py_EQ);
+        if (comp_err < 0) {  /* exception in __eq__ */
+            return NULL;
+        }
+        if (comp_err == 1) {  /* key == key_or_null */
+            if (val == val_or_node) {
+                /* we already have the same key/val pair; return self. */
+                Py_INCREF(self);
+                return (MapNode *)self;
+            }
+
+            /* We're setting a new value for the key we had before. */
+            if (mutid != 0 && self->b_mutid == mutid) {
+                /* We've been mutating this node before: update inplace. */
+                Py_INCREF(val);
+                Py_SETREF(self->b_array[val_idx], val);
+                Py_INCREF(self);
+                return (MapNode *)self;
+            }
+            else {
+                /* Make a new bitmap node with a replaced value,
+                   and return it. */
+                MapNode_Bitmap *ret = map_node_bitmap_clone(self, mutid);
+                if (ret == NULL) {
+                    return NULL;
+                }
+                Py_INCREF(val);
+                Py_SETREF(ret->b_array[val_idx], val);
+                return (MapNode *)ret;
+            }
+        }
+
+        /* It's a new key, and it has the same index as *one* another key.
+           We have a collision.  We need to create a new node which will
+           combine the existing key and the key we're adding.
+
+           `map_node_new_bitmap_or_collision` will either create a new
+           Collision node if the keys have identical hashes, or
+           a new Bitmap node.
+        */
+        MapNode *sub_node = map_node_new_bitmap_or_collision(
+            shift + 5,
+            key_or_null, val_or_node,  /* existing key/val */
+            hash,
+            key, val,  /* new key/val */
+            self->b_mutid
+        );
+        if (sub_node == NULL) {
+            return NULL;
+        }
+
+        if (mutid != 0 && self->b_mutid == mutid) {
+            /* In-place: NULL key marks the slot as a sub-node pointer. */
+            Py_SETREF(self->b_array[key_idx], NULL);
+            Py_SETREF(self->b_array[val_idx], (PyObject *)sub_node);
+            Py_INCREF(self);
+
+            *added_leaf = 1;
+            return (MapNode *)self;
+        }
+        else {
+            MapNode_Bitmap *ret = map_node_bitmap_clone(self, mutid);
+            if (ret == NULL) {
+                Py_DECREF(sub_node);
+                return NULL;
+            }
+            Py_SETREF(ret->b_array[key_idx], NULL);
+            Py_SETREF(ret->b_array[val_idx], (PyObject *)sub_node);
+
+            *added_leaf = 1;
+            return (MapNode *)ret;
+        }
+    }
+    else {
+        /* There was no key before with the same (shift,hash). */
+
+        uint32_t n = map_bitcount(self->b_bitmap);
+
+        if (n >= 16) {
+            /* When we have a situation where we want to store more
+               than 16 nodes at one level of the tree, we no longer
+               want to use the Bitmap node with bitmap encoding.
+
+               Instead we start using an Array node, which has
+               simpler (faster) implementation at the expense of
+               having preallocated 32 pointers for its keys/values
+               pairs.
+
+               Small map objects (<30 keys) usually don't have any
+               Array nodes at all.  Between ~30 and ~400 keys map
+               objects usually have one Array node, and usually it's
+               a root node.
+            */
+
+            uint32_t jdx = map_mask(hash, shift);
+            /* 'jdx' is the index of where the new key should be added
+               in the new Array node we're about to create. */
+
+            MapNode *empty = NULL;
+            MapNode_Array *new_node = NULL;
+            MapNode *res = NULL;
+
+            /* Create a new Array node. */
+            new_node = (MapNode_Array *)map_node_array_new(n + 1, mutid);
+            if (new_node == NULL) {
+                goto fin;
+            }
+
+            /* Create an empty bitmap node for the next
+               map_node_assoc call. */
+            empty = map_node_bitmap_new(0, mutid);
+            if (empty == NULL) {
+                goto fin;
+            }
+
+            /* Make a new bitmap node for the key/val we're adding.
+               Set that bitmap node to new-array-node[jdx]. */
+            new_node->a_array[jdx] = map_node_assoc(
+                empty, shift + 5, hash, key, val, added_leaf, mutid);
+            if (new_node->a_array[jdx] == NULL) {
+                goto fin;
+            }
+
+            /* Copy existing key/value pairs from the current Bitmap
+               node to the new Array node we've just created. */
+            Py_ssize_t i, j;
+            for (i = 0, j = 0; i < HAMT_ARRAY_NODE_SIZE; i++) {
+                if (((self->b_bitmap >> i) & 1) != 0) {
+                    /* Ensure we don't accidentally override `jdx` element
+                       we set few lines above.
+                    */
+                    assert(new_node->a_array[i] == NULL);
+
+                    if (self->b_array[j] == NULL) {
+                        /* Slot holds a sub-node: move it over directly. */
+                        new_node->a_array[i] =
+                            (MapNode *)self->b_array[j + 1];
+                        Py_INCREF(new_node->a_array[i]);
+                    }
+                    else {
+                        int32_t rehash = map_hash(self->b_array[j]);
+                        if (rehash == -1) {
+                            goto fin;
+                        }
+
+                        new_node->a_array[i] = map_node_assoc(
+                            empty, shift + 5,
+                            rehash,
+                            self->b_array[j],
+                            self->b_array[j + 1],
+                            added_leaf,
+                            mutid);
+
+                        if (new_node->a_array[i] == NULL) {
+                            goto fin;
+                        }
+                    }
+                    j += 2;
+                }
+            }
+
+            VALIDATE_ARRAY_NODE(new_node)
+
+            /* That's it! */
+            res = (MapNode *)new_node;
+
+        fin:
+            Py_XDECREF(empty);
+            if (res == NULL) {
+                Py_XDECREF(new_node);
+            }
+            return res;
+        }
+        else {
+            /* We have less than 16 keys at this level; let's just
+               create a new bitmap node out of this node with the
+               new key/val pair added. */
+
+            uint32_t key_idx = 2 * idx;
+            uint32_t val_idx = key_idx + 1;
+            uint32_t i;
+
+            *added_leaf = 1;
+
+            /* Allocate new Bitmap node which can have one more key/val
+               pair in addition to what we have already. */
+            MapNode_Bitmap *new_node =
+                (MapNode_Bitmap *)map_node_bitmap_new(2 * (n + 1), mutid);
+            if (new_node == NULL) {
+                return NULL;
+            }
+
+            /* Copy all keys/values that will be before the new key/value
+               we are adding. */
+            for (i = 0; i < key_idx; i++) {
+                Py_XINCREF(self->b_array[i]);
+                new_node->b_array[i] = self->b_array[i];
+            }
+
+            /* Set the new key/value to the new Bitmap node. */
+            Py_INCREF(key);
+            new_node->b_array[key_idx] = key;
+            Py_INCREF(val);
+            new_node->b_array[val_idx] = val;
+
+            /* Copy all keys/values that will be after the new key/value
+               we are adding. */
+            assert(Py_SIZE(self) >= 0 && Py_SIZE(self) <= 32);
+            for (i = key_idx; i < (uint32_t)Py_SIZE(self); i++) {
+                Py_XINCREF(self->b_array[i]);
+                new_node->b_array[i + 2] = self->b_array[i];
+            }
+
+            new_node->b_bitmap = self->b_bitmap | bit;
+            return (MapNode *)new_node;
+        }
+    }
+}
+
+static map_without_t
+map_node_bitmap_without(MapNode_Bitmap *self,
+                        uint32_t shift, int32_t hash,
+                        PyObject *key,
+                        MapNode **new_node,
+                        uint64_t mutid)
+{
+    /* without operation for bitmap nodes: remove `key` from `self`.
+
+       Returns W_NOT_FOUND if the key isn't here, W_ERROR on exception,
+       W_EMPTY if the removal would leave this node empty, or
+       W_NEWNODE with `*new_node` set to the replacement node. */
+
+    uint32_t bit = map_bitpos(hash, shift);
+    if ((self->b_bitmap & bit) == 0) {
+        return W_NOT_FOUND;
+    }
+
+    uint32_t idx = map_bitindex(self->b_bitmap, bit);
+
+    uint32_t key_idx = 2 * idx;
+    uint32_t val_idx = key_idx + 1;
+
+    PyObject *key_or_null = self->b_array[key_idx];
+    PyObject *val_or_node = self->b_array[val_idx];
+
+    if (key_or_null == NULL) {
+        /* key == NULL means that 'value' is another tree node. */
+
+        MapNode *sub_node = NULL;
+        MapNode_Bitmap *target = NULL;
+
+        map_without_t res = map_node_without(
+            (MapNode *)val_or_node,
+            shift + 5, hash, key, &sub_node,
+            mutid);
+
+        switch (res) {
+            case W_EMPTY:
+                /* It's impossible for us to receive a W_EMPTY here:
+
+                    - Array nodes are converted to Bitmap nodes when
+                      we delete 16th item from them;
+
+                    - Collision nodes are converted to Bitmap when
+                      there is one item in them;
+
+                    - Bitmap node's without() inlines single-item
+                      sub-nodes.
+
+                   So in no situation we can have a single-item
+                   Bitmap child of another Bitmap node.
+                */
+                abort();
+
+            case W_NEWNODE: {
+                assert(sub_node != NULL);
+
+                if (IS_BITMAP_NODE(sub_node)) {
+                    MapNode_Bitmap *sub_tree = (MapNode_Bitmap *)sub_node;
+                    if (map_node_bitmap_count(sub_tree) == 1 &&
+                            sub_tree->b_array[0] != NULL)
+                    {
+                        /* A bitmap node with one key/value pair.  Just
+                           merge it into this node.
+
+                           Note that we don't inline Bitmap nodes that
+                           have a NULL key -- those nodes point to another
+                           tree level, and we cannot simply move tree levels
+                           up or down.
+                        */
+
+                        if (mutid != 0 && self->b_mutid == mutid) {
+                            target = self;
+                            Py_INCREF(target);
+                        }
+                        else {
+                            target = map_node_bitmap_clone(self, mutid);
+                            if (target == NULL) {
+                                Py_DECREF(sub_node);
+                                return W_ERROR;
+                            }
+                        }
+
+                        PyObject *key = sub_tree->b_array[0];
+                        PyObject *val = sub_tree->b_array[1];
+
+                        /* XSETREF: the key slot being replaced is NULL. */
+                        Py_INCREF(key);
+                        Py_XSETREF(target->b_array[key_idx], key);
+                        Py_INCREF(val);
+                        Py_SETREF(target->b_array[val_idx], val);
+
+                        Py_DECREF(sub_tree);
+
+                        *new_node = (MapNode *)target;
+                        return W_NEWNODE;
+                    }
+                }
+
+#if !defined(NDEBUG)
+                /* Ensure that Collision.without implementation
+                   converts to Bitmap nodes itself.
+                */
+                if (IS_COLLISION_NODE(sub_node)) {
+                    assert(map_node_collision_count(
+                            (MapNode_Collision*)sub_node) > 1);
+                }
+#endif
+
+                if (mutid != 0 && self->b_mutid == mutid) {
+                    target = self;
+                    Py_INCREF(target);
+                }
+                else {
+                    target = map_node_bitmap_clone(self, mutid);
+                    if (target == NULL) {
+                        return W_ERROR;
+                    }
+                }
+
+                Py_SETREF(target->b_array[val_idx],
+                          (PyObject *)sub_node);  /* borrow */
+
+                *new_node = (MapNode *)target;
+                return W_NEWNODE;
+            }
+
+            case W_ERROR:
+            case W_NOT_FOUND:
+                assert(sub_node == NULL);
+                return res;
+
+            default:
+                abort();
+        }
+    }
+    else {
+        /* We have a regular key/value pair */
+
+        int cmp = PyObject_RichCompareBool(key_or_null, key, Py_EQ);
+        if (cmp < 0) {
+            return W_ERROR;
+        }
+        if (cmp == 0) {
+            return W_NOT_FOUND;
+        }
+
+        if (map_node_bitmap_count(self) == 1) {
+            return W_EMPTY;
+        }
+
+        *new_node = (MapNode *)
+            map_node_bitmap_clone_without(self, bit, mutid);
+        if (*new_node == NULL) {
+            return W_ERROR;
+        }
+
+        return W_NEWNODE;
+    }
+}
+
+static map_find_t
+map_node_bitmap_find(MapNode_Bitmap *self,
+                     uint32_t shift, int32_t hash,
+                     PyObject *key, PyObject **val)
+{
+    /* Lookup a key in a Bitmap node.
+
+       On F_FOUND, `*val` is set to a borrowed reference. */
+
+    uint32_t bit = map_bitpos(hash, shift);
+    uint32_t idx;
+    uint32_t key_idx;
+    uint32_t val_idx;
+    PyObject *key_or_null;
+    PyObject *val_or_node;
+    int comp_err;
+
+    if ((self->b_bitmap & bit) == 0) {
+        return F_NOT_FOUND;
+    }
+
+    idx = map_bitindex(self->b_bitmap, bit);
+    key_idx = idx * 2;
+    val_idx = key_idx + 1;
+
+    assert(val_idx < (size_t)Py_SIZE(self));
+
+    key_or_null = self->b_array[key_idx];
+    val_or_node = self->b_array[val_idx];
+
+    if (key_or_null == NULL) {
+        /* There are a few keys that have the same hash at the current shift
+           that match our key.  Dispatch the lookup further down the tree. */
+        assert(val_or_node != NULL);
+        return map_node_find((MapNode *)val_or_node,
+                             shift + 5, hash, key, val);
+    }
+
+    /* We have only one key -- a potential match.  Let's compare if the
+       key we are looking at is equal to the key we are looking for. */
+    assert(key != NULL);
+    comp_err = PyObject_RichCompareBool(key, key_or_null, Py_EQ);
+    if (comp_err < 0) {  /* exception in __eq__ */
+        return F_ERROR;
+    }
+    if (comp_err == 1) {  /* key == key_or_null */
+        *val = val_or_node;
+        return F_FOUND;
+    }
+
+    return F_NOT_FOUND;
+}
+
+static int
+map_node_bitmap_traverse(MapNode_Bitmap *self, visitproc visit, void *arg)
+{
+    /* Bitmap's tp_traverse: visit every non-NULL slot for the GC. */
+
+    Py_ssize_t i;
+
+    for (i = Py_SIZE(self); --i >= 0; ) {
+        Py_VISIT(self->b_array[i]);
+    }
+
+    return 0;
+}
+
+static void
+map_node_bitmap_dealloc(MapNode_Bitmap *self)
+{
+    /* Bitmap's tp_dealloc.  Uses the trashcan mechanism to avoid
+       stack overflow when freeing deeply nested trees. */
+
+    Py_ssize_t len = Py_SIZE(self);
+    Py_ssize_t i;
+
+    PyObject_GC_UnTrack(self);
+    Py_TRASHCAN_SAFE_BEGIN(self)
+
+    if (len > 0) {
+        i = len;
+        while (--i >= 0) {
+            Py_XDECREF(self->b_array[i]);
+        }
+    }
+
+    Py_TYPE(self)->tp_free((PyObject *)self);
+    Py_TRASHCAN_SAFE_END(self)
+}
+
+static int
+map_node_bitmap_dump(MapNode_Bitmap *node,
+                     _PyUnicodeWriter *writer, int level)
+{
+    /* Debug build: __dump__() method implementation for Bitmap nodes.
+
+       Return 0 on success; -1, with an exception set, on error. */
+
+    Py_ssize_t i;
+    PyObject *tmp1;
+    PyObject *tmp2;
+
+    if (_map_dump_ident(writer, level + 1)) {
+        goto error;
+    }
+
+    if (_map_dump_format(writer, "BitmapNode(size=%zd count=%zd ",
+                         Py_SIZE(node), Py_SIZE(node) / 2))
+    {
+        goto error;
+    }
+
+    /* Render the bitmap in base 2 for readability. */
+    tmp1 = PyLong_FromUnsignedLong(node->b_bitmap);
+    if (tmp1 == NULL) {
+        goto error;
+    }
+    tmp2 = _PyLong_Format(tmp1, 2);
+    Py_DECREF(tmp1);
+    if (tmp2 == NULL) {
+        goto error;
+    }
+    if (_map_dump_format(writer, "bitmap=%S id=%p):\n", tmp2, node)) {
+        Py_DECREF(tmp2);
+        goto error;
+    }
+    Py_DECREF(tmp2);
+
+    for (i = 0; i < Py_SIZE(node); i += 2) {
+        PyObject *key_or_null = node->b_array[i];
+        PyObject *val_or_node = node->b_array[i + 1];
+
+        if (_map_dump_ident(writer, level + 2)) {
+            goto error;
+        }
+
+        if (key_or_null == NULL) {
+            /* NULL key: the value slot holds a sub-node; dump it
+               recursively at the next indentation level. */
+            if (_map_dump_format(writer, "NULL:\n")) {
+                goto error;
+            }
+
+            if (map_node_dump((MapNode *)val_or_node,
+                              writer, level + 2))
+            {
+                goto error;
+            }
+        }
+        else {
+            if (_map_dump_format(writer, "%R: %R", key_or_null,
+                                 val_or_node))
+            {
+                goto error;
+            }
+        }
+
+        if (_map_dump_format(writer, "\n")) {
+            goto error;
+        }
+    }
+
+    return 0;
+error:
+    return -1;
+}
+
+
+/////////////////////////////////// Collision Node
+
+
+static MapNode *
+map_node_collision_new(int32_t hash, Py_ssize_t size, uint64_t mutid)
+{
+    /* Create a new Collision node.
+
+       `size` is the length of the flat key/value array, so it is even
+       and at least 4 (a Collision node holds >= 2 pairs). */
+
+    MapNode_Collision *node;
+    Py_ssize_t i;
+
+    assert(size >= 4);
+    assert(size % 2 == 0);
+
+    node = PyObject_GC_NewVar(
+        MapNode_Collision, &_Map_CollisionNode_Type, size);
+    if (node == NULL) {
+        return NULL;
+    }
+
+    for (i = 0; i < size; i++) {
+        node->c_array[i] = NULL;
+    }
+
+    Py_SET_SIZE(node, size);
+    node->c_hash = hash;
+
+    node->c_mutid = mutid;
+
+    PyObject_GC_Track(node);
+    return (MapNode *)node;
+}
+
+static map_find_t
+map_node_collision_find_index(MapNode_Collision *self, PyObject *key,
+                              Py_ssize_t *idx)
+{
+    /* Lookup `key` in the Collision node `self`.  Set the index of the
+       found key to 'idx'.  All keys in a Collision node share one hash,
+       so this is a linear scan comparing by equality. */
+
+    Py_ssize_t i;
+    PyObject *el;
+
+    for (i = 0; i < Py_SIZE(self); i += 2) {
+        el = self->c_array[i];
+
+        assert(el != NULL);
+        int cmp = PyObject_RichCompareBool(key, el, Py_EQ);
+        if (cmp < 0) {
+            return F_ERROR;
+        }
+        if (cmp == 1) {
+            *idx = i;
+            return F_FOUND;
+        }
+    }
+
+    return F_NOT_FOUND;
+}
+
+static MapNode *
+map_node_collision_assoc(MapNode_Collision *self,
+                         uint32_t shift, int32_t hash,
+                         PyObject *key, PyObject *val, int* added_leaf,
+                         uint64_t mutid)
+{
+    /* Set a new key to this level (currently a Collision node)
+       of the tree. */
+
+    if (hash == self->c_hash) {
+        /* The hash of the 'key' we are adding matches the hash of
+           other keys in this Collision node. */
+
+        Py_ssize_t key_idx = -1;
+        map_find_t found;
+        MapNode_Collision *new_node;
+        Py_ssize_t i;
+
+        /* Let's try to lookup the new 'key', maybe we already have it. */
+        found = map_node_collision_find_index(self, key, &key_idx);
+        switch (found) {
+            case F_ERROR:
+                /* Exception. */
+                return NULL;
+
+            case F_NOT_FOUND:
+                /* This is a totally new key.  Clone the current node,
+                   add a new key/value to the cloned node. */
+
+                new_node = (MapNode_Collision *)map_node_collision_new(
+                    self->c_hash, Py_SIZE(self) + 2, mutid);
+                if (new_node == NULL) {
+                    return NULL;
+                }
+
+                for (i = 0; i < Py_SIZE(self); i++) {
+                    Py_INCREF(self->c_array[i]);
+                    new_node->c_array[i] = self->c_array[i];
+                }
+
+                /* `i` now equals Py_SIZE(self): append at the end. */
+                Py_INCREF(key);
+                new_node->c_array[i] = key;
+                Py_INCREF(val);
+                new_node->c_array[i + 1] = val;
+
+                *added_leaf = 1;
+                return (MapNode *)new_node;
+
+            case F_FOUND:
+                /* There's a key which is equal to the key we are adding. */
+
+                assert(key_idx >= 0);
+                assert(key_idx < Py_SIZE(self));
+                Py_ssize_t val_idx = key_idx + 1;
+
+                if (self->c_array[val_idx] == val) {
+                    /* We're setting a key/value pair that's already set. */
+                    Py_INCREF(self);
+                    return (MapNode *)self;
+                }
+
+                /* We need to replace old value for the key with
+                   a new value. */
+
+                if (mutid != 0 && self->c_mutid == mutid) {
+                    /* MapMutation fast path: mutate in place. */
+                    new_node = self;
+                    Py_INCREF(self);
+                }
+                else {
+                    /* Create a new Collision node.*/
+                    new_node = (MapNode_Collision *)map_node_collision_new(
+                        self->c_hash, Py_SIZE(self), mutid);
+                    if (new_node == NULL) {
+                        return NULL;
+                    }
+
+                    /* Copy all elements of the old node to the new one. */
+                    for (i = 0; i < Py_SIZE(self); i++) {
+                        Py_INCREF(self->c_array[i]);
+                        new_node->c_array[i] = self->c_array[i];
+                    }
+                }
+
+                /* Replace the old value with the new value for our key. */
+                Py_DECREF(new_node->c_array[val_idx]);
+                Py_INCREF(val);
+                new_node->c_array[val_idx] = val;
+
+                return (MapNode *)new_node;
+
+            default:
+                abort();
+        }
+    }
+    else {
+        /* The hash of the new key is different from the hash that
+           all keys of this Collision node have.
+
+           Create a Bitmap node inplace with two children:
+           key/value pair that we're adding, and the Collision node
+           we're replacing on this tree level.
+        */
+
+        MapNode_Bitmap *new_node;
+        MapNode *assoc_res;
+
+        new_node = (MapNode_Bitmap *)map_node_bitmap_new(2, mutid);
+        if (new_node == NULL) {
+            return NULL;
+        }
+        new_node->b_bitmap = map_bitpos(self->c_hash, shift);
+        Py_INCREF(self);
+        new_node->b_array[1] = (PyObject*) self;
+
+        assoc_res = map_node_bitmap_assoc(
+            new_node, shift, hash, key, val, added_leaf, mutid);
+        Py_DECREF(new_node);
+        return assoc_res;
+    }
+}
+
+static inline Py_ssize_t
+map_node_collision_count(MapNode_Collision *node)
+{
+    /* Number of key/value pairs stored in this Collision node. */
+    return Py_SIZE(node) / 2;
+}
+
+static map_without_t
+map_node_collision_without(MapNode_Collision *self,
+                           uint32_t shift, int32_t hash,
+                           PyObject *key,
+                           MapNode **new_node,
+                           uint64_t mutid)
+{
+    /* without operation for Collision nodes: remove `key`.
+
+       Collision nodes with a single remaining pair are converted to
+       Bitmap nodes here, so callers never see a one-pair Collision. */
+
+    if (hash != self->c_hash) {
+        return W_NOT_FOUND;
+    }
+
+    Py_ssize_t key_idx = -1;
+    map_find_t found = map_node_collision_find_index(self, key, &key_idx);
+
+    switch (found) {
+        case F_ERROR:
+            return W_ERROR;
+
+        case F_NOT_FOUND:
+            return W_NOT_FOUND;
+
+        case F_FOUND:
+            assert(key_idx >= 0);
+            assert(key_idx < Py_SIZE(self));
+
+            Py_ssize_t new_count = map_node_collision_count(self) - 1;
+
+            if (new_count == 0) {
+                /* The node has only one key/value pair and it's for the
+                   key we're trying to delete.  So a new node will be empty
+                   after the removal.
+                */
+                return W_EMPTY;
+            }
+
+            if (new_count == 1) {
+                /* The node has two keys, and after deletion the
+                   new Collision node would have one.  Collision nodes
+                   with one key shouldn't exist, so convert it to a
+                   Bitmap node.
+                */
+                MapNode_Bitmap *node = (MapNode_Bitmap *)
+                    map_node_bitmap_new(2, mutid);
+                if (node == NULL) {
+                    return W_ERROR;
+                }
+
+                /* Copy the surviving pair (the one NOT at key_idx). */
+                if (key_idx == 0) {
+                    Py_INCREF(self->c_array[2]);
+                    node->b_array[0] = self->c_array[2];
+                    Py_INCREF(self->c_array[3]);
+                    node->b_array[1] = self->c_array[3];
+                }
+                else {
+                    assert(key_idx == 2);
+                    Py_INCREF(self->c_array[0]);
+                    node->b_array[0] = self->c_array[0];
+                    Py_INCREF(self->c_array[1]);
+                    node->b_array[1] = self->c_array[1];
+                }
+
+                node->b_bitmap = map_bitpos(hash, shift);
+
+                *new_node = (MapNode *)node;
+                return W_NEWNODE;
+            }
+
+            /* Allocate a new Collision node with capacity for one
+               less key/value pair */
+            MapNode_Collision *new = (MapNode_Collision *)
+                map_node_collision_new(
+                    self->c_hash, Py_SIZE(self) - 2, mutid);
+            if (new == NULL) {
+                return W_ERROR;
+            }
+
+            /* Copy all other keys from `self` to `new` */
+            Py_ssize_t i;
+            for (i = 0; i < key_idx; i++) {
+                Py_INCREF(self->c_array[i]);
+                new->c_array[i] = self->c_array[i];
+            }
+            for (i = key_idx + 2; i < Py_SIZE(self); i++) {
+                Py_INCREF(self->c_array[i]);
+                new->c_array[i - 2] = self->c_array[i];
+            }
+
+            *new_node = (MapNode*)new;
+            return W_NEWNODE;
+
+        default:
+            abort();
+    }
+}
+
+static map_find_t
+map_node_collision_find(MapNode_Collision *self,
+                        uint32_t shift, int32_t hash,
+                        PyObject *key, PyObject **val)
+{
+    /* Lookup `key` in the Collision node `self`.  Set the value
+       for the found key to 'val' (a borrowed reference). */
+
+    Py_ssize_t idx = -1;
+    map_find_t res;
+
+    res = map_node_collision_find_index(self, key, &idx);
+    if (res == F_ERROR || res == F_NOT_FOUND) {
+        return res;
+    }
+
+    assert(idx >= 0);
+    assert(idx + 1 < Py_SIZE(self));
+
+    *val = self->c_array[idx + 1];
+    assert(*val != NULL);
+
+    return F_FOUND;
+}
+
+
+static int
+map_node_collision_traverse(MapNode_Collision *self,
+                            visitproc visit, void *arg)
+{
+    /* Collision's tp_traverse: visit all stored keys and values. */
+
+    Py_ssize_t i;
+
+    for (i = Py_SIZE(self); --i >= 0; ) {
+        Py_VISIT(self->c_array[i]);
+    }
+
+    return 0;
+}
+
+static void
+map_node_collision_dealloc(MapNode_Collision *self)
+{
+    /* Collision's tp_dealloc.  Uses the trashcan mechanism to avoid
+       stack overflow when freeing deeply nested trees. */
+
+    Py_ssize_t len = Py_SIZE(self);
+
+    PyObject_GC_UnTrack(self);
+    Py_TRASHCAN_SAFE_BEGIN(self)
+
+    if (len > 0) {
+
+        while (--len >= 0) {
+            Py_XDECREF(self->c_array[len]);
+        }
+    }
+
+    Py_TYPE(self)->tp_free((PyObject *)self);
+    Py_TRASHCAN_SAFE_END(self)
+}
+
+static int
+map_node_collision_dump(MapNode_Collision *node,
+                        _PyUnicodeWriter *writer, int level)
+{
+    /* Debug build: __dump__() method implementation for Collision nodes.
+
+       Return 0 on success; -1, with an exception set, on error. */
+
+    Py_ssize_t i;
+
+    if (_map_dump_ident(writer, level + 1)) {
+        goto error;
+    }
+
+    if (_map_dump_format(writer, "CollisionNode(size=%zd id=%p):\n",
+                         Py_SIZE(node), node))
+    {
+        goto error;
+    }
+
+    for (i = 0; i < Py_SIZE(node); i += 2) {
+        PyObject *key = node->c_array[i];
+        PyObject *val = node->c_array[i + 1];
+
+        if (_map_dump_ident(writer, level + 2)) {
+            goto error;
+        }
+
+        if (_map_dump_format(writer, "%R: %R\n", key, val)) {
+            goto error;
+        }
+    }
+
+    return 0;
+error:
+    return -1;
+}
+
+
+/////////////////////////////////// Array Node
+
+
+static MapNode *
+map_node_array_new(Py_ssize_t count, uint64_t mutid)
+{
+    /* Create a new Array node with all HAMT_ARRAY_NODE_SIZE child
+       slots NULL.  `count` records how many slots the caller will
+       populate (tracked in `a_count`, not enforced here). */
+
+    Py_ssize_t i;
+
+    MapNode_Array *node = PyObject_GC_New(
+        MapNode_Array, &_Map_ArrayNode_Type);
+    if (node == NULL) {
+        return NULL;
+    }
+
+    for (i = 0; i < HAMT_ARRAY_NODE_SIZE; i++) {
+        node->a_array[i] = NULL;
+    }
+
+    node->a_count = count;
+    node->a_mutid = mutid;
+
+    PyObject_GC_Track(node);
+    return (MapNode *)node;
+}
+
+static MapNode_Array *
+map_node_array_clone(MapNode_Array *node, uint64_t mutid)
+{
+    /* Clone an Array node: same children (each reference counted),
+       same count, with the given `mutid`. */
+
+    MapNode_Array *clone;
+    Py_ssize_t i;
+
+    VALIDATE_ARRAY_NODE(node)
+    assert(node->a_count <= HAMT_ARRAY_NODE_SIZE);
+
+    /* Create a new Array node. */
+    clone = (MapNode_Array *)map_node_array_new(node->a_count, mutid);
+    if (clone == NULL) {
+        return NULL;
+    }
+
+    /* Copy all elements from the current Array node to the new one. */
+    for (i = 0; i < HAMT_ARRAY_NODE_SIZE; i++) {
+        Py_XINCREF(node->a_array[i]);
+        clone->a_array[i] = node->a_array[i];
+    }
+
+    clone->a_mutid = mutid;
+
+    VALIDATE_ARRAY_NODE(clone)
+    return clone;
+}
+
+static MapNode *
+map_node_array_assoc(MapNode_Array *self,
+                     uint32_t shift, int32_t hash,
+                     PyObject *key, PyObject *val, int* added_leaf,
+                     uint64_t mutid)
+{
+    /* Set a new key to this level (currently an Array node)
+       of the tree.
+
+       Array nodes don't store values, they can only point to
+       other nodes.  They are simple arrays of 32 BaseNode pointers.
+     */
+
+    uint32_t idx = map_mask(hash, shift);
+    MapNode *node = self->a_array[idx];
+    MapNode *child_node;
+    MapNode_Array *new_node;
+    Py_ssize_t i;
+
+    if (node == NULL) {
+        /* There's no child node for the given hash.  Create a new
+           Bitmap node for this key. */
+
+        MapNode_Bitmap *empty = NULL;
+
+        /* Get an empty Bitmap node to work with. */
+        empty = (MapNode_Bitmap *)map_node_bitmap_new(0, mutid);
+        if (empty == NULL) {
+            return NULL;
+        }
+
+        /* Set key/val to the newly created empty Bitmap, thus
+           creating a new Bitmap node with our key/value pair. */
+        child_node = map_node_bitmap_assoc(
+            empty,
+            shift + 5, hash, key, val, added_leaf, mutid);
+        Py_DECREF(empty);
+        if (child_node == NULL) {
+            return NULL;
+        }
+
+        if (mutid != 0 && self->a_mutid == mutid) {
+            /* MapMutation fast path: mutate in place. */
+            new_node = self;
+            self->a_count++;
+            Py_INCREF(self);
+        }
+        else {
+            /* Create a new Array node. */
+            new_node = (MapNode_Array *)map_node_array_new(
+                self->a_count + 1, mutid);
+            if (new_node == NULL) {
+                Py_DECREF(child_node);
+                return NULL;
+            }
+
+            /* Copy all elements from the current Array node to the
+               new one. */
+            for (i = 0; i < HAMT_ARRAY_NODE_SIZE; i++) {
+                Py_XINCREF(self->a_array[i]);
+                new_node->a_array[i] = self->a_array[i];
+            }
+        }
+
+        assert(new_node->a_array[idx] == NULL);
+        new_node->a_array[idx] = child_node;  /* borrow */
+        VALIDATE_ARRAY_NODE(new_node)
+    }
+    else {
+        /* There's a child node for the given hash.
+           Set the key to it. */
+
+        child_node = map_node_assoc(
+            node, shift + 5, hash, key, val, added_leaf, mutid);
+        if (child_node == NULL) {
+            return NULL;
+        }
+        else if (child_node == (MapNode *)self) {
+            /* Nothing changed down the tree; return self as-is. */
+            Py_DECREF(child_node);
+            return (MapNode *)self;
+        }
+
+        if (mutid != 0 && self->a_mutid == mutid) {
+            new_node = self;
+            Py_INCREF(self);
+        }
+        else {
+            new_node = map_node_array_clone(self, mutid);
+        }
+
+        if (new_node == NULL) {
+            Py_DECREF(child_node);
+            return NULL;
+        }
+
+        Py_SETREF(new_node->a_array[idx], child_node);  /* borrow */
+        VALIDATE_ARRAY_NODE(new_node)
+    }
+
+    return (MapNode *)new_node;
+}
+
+static map_without_t
+map_node_array_without(MapNode_Array *self,
+                       uint32_t shift, int32_t hash,
+                       PyObject *key,
+                       MapNode **new_node,
+                       uint64_t mutid)
+{
+    /* without operation for Array nodes: remove `key` by dispatching
+       to the child at the hash's slot.  If the deletion drops the
+       child count below 16, the Array node is converted back to a
+       Bitmap node. */
+
+    uint32_t idx = map_mask(hash, shift);
+    MapNode *node = self->a_array[idx];
+
+    if (node == NULL) {
+        return W_NOT_FOUND;
+    }
+
+    MapNode *sub_node = NULL;
+    MapNode_Array *target = NULL;
+    map_without_t res = map_node_without(
+        (MapNode *)node,
+        shift + 5, hash, key, &sub_node, mutid);
+
+    switch (res) {
+        case W_NOT_FOUND:
+        case W_ERROR:
+            assert(sub_node == NULL);
+            return res;
+
+        case W_NEWNODE: {
+            /* We need to replace a node at the `idx` index.
+               Clone this node and replace.
+            */
+            assert(sub_node != NULL);
+
+            if (mutid != 0 && self->a_mutid == mutid) {
+                target = self;
+                Py_INCREF(self);
+            }
+            else {
+                target = map_node_array_clone(self, mutid);
+                if (target == NULL) {
+                    Py_DECREF(sub_node);
+                    return W_ERROR;
+                }
+            }
+
+            Py_SETREF(target->a_array[idx], sub_node);  /* borrow */
+            *new_node = (MapNode*)target;  /* borrow */
+            return W_NEWNODE;
+        }
+
+        case W_EMPTY: {
+            assert(sub_node == NULL);
+            /* We need to remove a node at the `idx` index.
+               Calculate the size of the replacement Array node.
+            */
+            Py_ssize_t new_count = self->a_count - 1;
+
+            if (new_count == 0) {
+                return W_EMPTY;
+            }
+
+            if (new_count >= 16) {
+                /* We convert Bitmap nodes to Array nodes, when a
+                   Bitmap node needs to store more than 15 key/value
+                   pairs.  So we will create a new Array node if
+                   the number of key/values after deletion is still
+                   greater than 15.
+                */
+
+                if (mutid != 0 && self->a_mutid == mutid) {
+                    target = self;
+                    Py_INCREF(self);
+                }
+                else {
+                    target = map_node_array_clone(self, mutid);
+                    if (target == NULL) {
+                        return W_ERROR;
+                    }
+                }
+
+                target->a_count = new_count;
+                Py_CLEAR(target->a_array[idx]);
+
+                *new_node = (MapNode*)target;  /* borrow */
+                return W_NEWNODE;
+            }
+
+            /* New Array node would have less than 16 key/value
+               pairs.  We need to create a replacement Bitmap node. */
+
+            Py_ssize_t bitmap_size = new_count * 2;
+            uint32_t bitmap = 0;
+
+            MapNode_Bitmap *new = (MapNode_Bitmap *)
+                map_node_bitmap_new(bitmap_size, mutid);
+            if (new == NULL) {
+                return W_ERROR;
+            }
+
+            Py_ssize_t new_i = 0;
+            for (uint32_t i = 0; i < HAMT_ARRAY_NODE_SIZE; i++) {
+                if (i == idx) {
+                    /* Skip the node we are deleting. */
+                    continue;
+                }
+
+                MapNode *node = self->a_array[i];
+                if (node == NULL) {
+                    /* Skip any missing nodes. */
+                    continue;
+                }
+
+                bitmap |= 1u << i;
+
+                if (IS_BITMAP_NODE(node)) {
+                    MapNode_Bitmap *child = (MapNode_Bitmap *)node;
+
+                    if (map_node_bitmap_count(child) == 1 &&
+                            child->b_array[0] != NULL)
+                    {
+                        /* node is a Bitmap with one key/value pair, just
+                           merge it into the new Bitmap node we're building.
+
+                           Note that we don't inline Bitmap nodes that
+                           have a NULL key -- those nodes point to another
+                           tree level, and we cannot simply move tree levels
+                           up or down.
+                        */
+                        PyObject *key = child->b_array[0];
+                        PyObject *val = child->b_array[1];
+
+                        Py_INCREF(key);
+                        new->b_array[new_i] = key;
+                        Py_INCREF(val);
+                        new->b_array[new_i + 1] = val;
+                    }
+                    else {
+                        /* Store the child as a sub-node (NULL key slot). */
+                        new->b_array[new_i] = NULL;
+                        Py_INCREF(node);
+                        new->b_array[new_i + 1] = (PyObject*)node;
+                    }
+                }
+                else {
+
+#if !defined(NDEBUG)
+                    if (IS_COLLISION_NODE(node)) {
+                        assert(
+                            (map_node_collision_count(
+                                (MapNode_Collision*)node)) > 1);
+                    }
+                    else if (IS_ARRAY_NODE(node)) {
+                        assert(((MapNode_Array*)node)->a_count >= 16);
+                    }
+#endif
+
+                    /* Just copy the node into our new Bitmap */
+                    new->b_array[new_i] = NULL;
+                    Py_INCREF(node);
+                    new->b_array[new_i + 1] = (PyObject*)node;
+                }
+
+                new_i += 2;
+            }
+
+            new->b_bitmap = bitmap;
+            *new_node = (MapNode*)new;  /* borrow */
+            return W_NEWNODE;
+        }
+
+        default:
+            abort();
+    }
+}
+
+static map_find_t
+map_node_array_find(MapNode_Array *self,
+ uint32_t shift, int32_t hash,
+ PyObject *key, PyObject **val)
+{
+ /* Lookup `key` in the Array node `self`. Set the value
+ for the found key to 'val'. */
+
+ uint32_t idx = map_mask(hash, shift);
+ MapNode *node;
+
+ node = self->a_array[idx];
+ if (node == NULL) {
+ return F_NOT_FOUND;
+ }
+
+ /* Dispatch to the generic map_node_find */
+ return map_node_find(node, shift + 5, hash, key, val);
+}
+
+static int
+map_node_array_traverse(MapNode_Array *self,
+ visitproc visit, void *arg)
+{
+ /* Array's tp_traverse */
+
+ Py_ssize_t i;
+
+ for (i = 0; i < HAMT_ARRAY_NODE_SIZE; i++) {
+ Py_VISIT(self->a_array[i]);
+ }
+
+ return 0;
+}
+
static void
map_node_array_dealloc(MapNode_Array *self)
{
    /* Array's tp_dealloc.

       Uses the trashcan protocol to avoid C-stack overflow when
       deallocating deeply nested trees. */

    Py_ssize_t i;

    PyObject_GC_UnTrack(self);
    Py_TRASHCAN_SAFE_BEGIN(self)

    /* Release all child nodes; empty slots are NULL. */
    for (i = 0; i < HAMT_ARRAY_NODE_SIZE; i++) {
        Py_XDECREF(self->a_array[i]);
    }

    Py_TYPE(self)->tp_free((PyObject *)self);
    Py_TRASHCAN_SAFE_END(self)
}
+
static int
map_node_array_dump(MapNode_Array *node,
                    _PyUnicodeWriter *writer, int level)
{
    /* Debug build: __dump__() method implementation for Array nodes.

       Writes a human-readable description of `node` and all of its
       children into `writer`, indented by `level`.  Returns 0 on
       success and -1 on error. */

    Py_ssize_t i;

    if (_map_dump_ident(writer, level + 1)) {
        goto error;
    }

    if (_map_dump_format(writer, "ArrayNode(id=%p count=%zd):\n",
                         node, node->a_count)
    ) {
        goto error;
    }

    for (i = 0; i < HAMT_ARRAY_NODE_SIZE; i++) {
        /* Empty slots are not printed. */
        if (node->a_array[i] == NULL) {
            continue;
        }

        if (_map_dump_ident(writer, level + 2)) {
            goto error;
        }

        /* Print the slot index, then recurse into the child node. */
        if (_map_dump_format(writer, "%d::\n", i)) {
            goto error;
        }

        if (map_node_dump(node->a_array[i], writer, level + 1)) {
            goto error;
        }

        if (_map_dump_format(writer, "\n")) {
            goto error;
        }
    }

    return 0;
error:
    return -1;
}
+
+
+/////////////////////////////////// Node Dispatch
+
+
+static MapNode *
+map_node_assoc(MapNode *node,
+ uint32_t shift, int32_t hash,
+ PyObject *key, PyObject *val, int* added_leaf,
+ uint64_t mutid)
+{
+ /* Set key/value to the 'node' starting with the given shift/hash.
+ Return a new node, or the same node if key/value already
+ set.
+
+ added_leaf will be set to 1 if key/value wasn't in the
+ tree before.
+
+ This method automatically dispatches to the suitable
+ map_node_{nodetype}_assoc method.
+ */
+
+ *added_leaf = 0;
+
+ if (IS_BITMAP_NODE(node)) {
+ return map_node_bitmap_assoc(
+ (MapNode_Bitmap *)node,
+ shift, hash, key, val, added_leaf, mutid);
+ }
+ else if (IS_ARRAY_NODE(node)) {
+ return map_node_array_assoc(
+ (MapNode_Array *)node,
+ shift, hash, key, val, added_leaf, mutid);
+ }
+ else {
+ assert(IS_COLLISION_NODE(node));
+ return map_node_collision_assoc(
+ (MapNode_Collision *)node,
+ shift, hash, key, val, added_leaf, mutid);
+ }
+}
+
+static map_without_t
+map_node_without(MapNode *node,
+ uint32_t shift, int32_t hash,
+ PyObject *key,
+ MapNode **new_node,
+ uint64_t mutid)
+{
+ if (IS_BITMAP_NODE(node)) {
+ return map_node_bitmap_without(
+ (MapNode_Bitmap *)node,
+ shift, hash, key,
+ new_node,
+ mutid);
+ }
+ else if (IS_ARRAY_NODE(node)) {
+ return map_node_array_without(
+ (MapNode_Array *)node,
+ shift, hash, key,
+ new_node,
+ mutid);
+ }
+ else {
+ assert(IS_COLLISION_NODE(node));
+ return map_node_collision_without(
+ (MapNode_Collision *)node,
+ shift, hash, key,
+ new_node,
+ mutid);
+ }
+}
+
+static map_find_t
+map_node_find(MapNode *node,
+ uint32_t shift, int32_t hash,
+ PyObject *key, PyObject **val)
+{
+ /* Find the key in the node starting with the given shift/hash.
+
+ If a value is found, the result will be set to F_FOUND, and
+ *val will point to the found value object.
+
+ If a value wasn't found, the result will be set to F_NOT_FOUND.
+
+ If an exception occurs during the call, the result will be F_ERROR.
+
+ This method automatically dispatches to the suitable
+ map_node_{nodetype}_find method.
+ */
+
+ if (IS_BITMAP_NODE(node)) {
+ return map_node_bitmap_find(
+ (MapNode_Bitmap *)node,
+ shift, hash, key, val);
+
+ }
+ else if (IS_ARRAY_NODE(node)) {
+ return map_node_array_find(
+ (MapNode_Array *)node,
+ shift, hash, key, val);
+ }
+ else {
+ assert(IS_COLLISION_NODE(node));
+ return map_node_collision_find(
+ (MapNode_Collision *)node,
+ shift, hash, key, val);
+ }
+}
+
+static int
+map_node_dump(MapNode *node,
+ _PyUnicodeWriter *writer, int level)
+{
+ /* Debug build: __dump__() method implementation for a node.
+
+ This method automatically dispatches to the suitable
+ map_node_{nodetype})_dump method.
+ */
+
+ if (IS_BITMAP_NODE(node)) {
+ return map_node_bitmap_dump(
+ (MapNode_Bitmap *)node, writer, level);
+ }
+ else if (IS_ARRAY_NODE(node)) {
+ return map_node_array_dump(
+ (MapNode_Array *)node, writer, level);
+ }
+ else {
+ assert(IS_COLLISION_NODE(node));
+ return map_node_collision_dump(
+ (MapNode_Collision *)node, writer, level);
+ }
+}
+
+
+/////////////////////////////////// Iterators: Machinery
+
+
+static map_iter_t
+map_iterator_next(MapIteratorState *iter, PyObject **key, PyObject **val);
+
+
+static void
+map_iterator_init(MapIteratorState *iter, MapNode *root)
+{
+ for (uint32_t i = 0; i < _Py_HAMT_MAX_TREE_DEPTH; i++) {
+ iter->i_nodes[i] = NULL;
+ iter->i_pos[i] = 0;
+ }
+
+ iter->i_level = 0;
+
+ /* Note: we don't incref/decref nodes in i_nodes. */
+ iter->i_nodes[0] = root;
+}
+
static map_iter_t
map_iterator_bitmap_next(MapIteratorState *iter,
                         PyObject **key, PyObject **val)
{
    /* Advance the iterator when the top of its node stack is a
       Bitmap node.  Yields a borrowed key/value pair (I_ITEM), or
       recurses via map_iterator_next after pushing/popping a level. */

    int8_t level = iter->i_level;

    MapNode_Bitmap *node = (MapNode_Bitmap *)(iter->i_nodes[level]);
    Py_ssize_t pos = iter->i_pos[level];

    if (pos + 1 >= Py_SIZE(node)) {
        /* This node is exhausted: pop a level and continue there. */
#if !defined(NDEBUG)
        assert(iter->i_level >= 0);
        iter->i_nodes[iter->i_level] = NULL;
#endif
        iter->i_level--;
        return map_iterator_next(iter, key, val);
    }

    if (node->b_array[pos] == NULL) {
        /* A NULL key means b_array[pos + 1] is a child node:
           advance past this slot and push the child. */
        iter->i_pos[level] = pos + 2;

        assert(level + 1 < _Py_HAMT_MAX_TREE_DEPTH);
        int8_t next_level = (int8_t)(level + 1);
        iter->i_level = next_level;
        iter->i_pos[next_level] = 0;
        iter->i_nodes[next_level] = (MapNode *)
            node->b_array[pos + 1];

        return map_iterator_next(iter, key, val);
    }

    /* Plain key/value pair stored inline in this node. */
    *key = node->b_array[pos];
    *val = node->b_array[pos + 1];
    iter->i_pos[level] = pos + 2;
    return I_ITEM;
}
+
static map_iter_t
map_iterator_collision_next(MapIteratorState *iter,
                            PyObject **key, PyObject **val)
{
    /* Advance the iterator when the top of its node stack is a
       Collision node.  Collision nodes store only key/value pairs
       (no child nodes), so this either yields a pair or pops a
       level when the node is exhausted. */

    int8_t level = iter->i_level;

    MapNode_Collision *node = (MapNode_Collision *)(iter->i_nodes[level]);
    Py_ssize_t pos = iter->i_pos[level];

    if (pos + 1 >= Py_SIZE(node)) {
        /* Exhausted: pop a level and continue there. */
#if !defined(NDEBUG)
        assert(iter->i_level >= 0);
        iter->i_nodes[iter->i_level] = NULL;
#endif
        iter->i_level--;
        return map_iterator_next(iter, key, val);
    }

    /* Borrowed references into the node's array. */
    *key = node->c_array[pos];
    *val = node->c_array[pos + 1];
    iter->i_pos[level] = pos + 2;
    return I_ITEM;
}
+
static map_iter_t
map_iterator_array_next(MapIteratorState *iter,
                        PyObject **key, PyObject **val)
{
    /* Advance the iterator when the top of its node stack is an
       Array node.  Array nodes contain only child nodes, so this
       pushes the next non-NULL child and recurses, or pops a level
       when no children remain. */

    int8_t level = iter->i_level;

    MapNode_Array *node = (MapNode_Array *)(iter->i_nodes[level]);
    Py_ssize_t pos = iter->i_pos[level];

    if (pos >= HAMT_ARRAY_NODE_SIZE) {
        /* All slots visited: pop a level and continue there. */
#if !defined(NDEBUG)
        assert(iter->i_level >= 0);
        iter->i_nodes[iter->i_level] = NULL;
#endif
        iter->i_level--;
        return map_iterator_next(iter, key, val);
    }

    /* Find the next occupied slot and descend into it. */
    for (Py_ssize_t i = pos; i < HAMT_ARRAY_NODE_SIZE; i++) {
        if (node->a_array[i] != NULL) {
            iter->i_pos[level] = i + 1;

            assert((level + 1) < _Py_HAMT_MAX_TREE_DEPTH);
            int8_t next_level = (int8_t)(level + 1);
            iter->i_pos[next_level] = 0;
            iter->i_nodes[next_level] = node->a_array[i];
            iter->i_level = next_level;

            return map_iterator_next(iter, key, val);
        }
    }

    /* No occupied slots left: pop a level. */
#if !defined(NDEBUG)
    assert(iter->i_level >= 0);
    iter->i_nodes[iter->i_level] = NULL;
#endif

    iter->i_level--;
    return map_iterator_next(iter, key, val);
}
+
+static map_iter_t
+map_iterator_next(MapIteratorState *iter, PyObject **key, PyObject **val)
+{
+ if (iter->i_level < 0) {
+ return I_END;
+ }
+
+ assert(iter->i_level < _Py_HAMT_MAX_TREE_DEPTH);
+
+ MapNode *current = iter->i_nodes[iter->i_level];
+
+ if (IS_BITMAP_NODE(current)) {
+ return map_iterator_bitmap_next(iter, key, val);
+ }
+ else if (IS_ARRAY_NODE(current)) {
+ return map_iterator_array_next(iter, key, val);
+ }
+ else {
+ assert(IS_COLLISION_NODE(current));
+ return map_iterator_collision_next(iter, key, val);
+ }
+}
+
+
+/////////////////////////////////// HAMT high-level functions
+
+
static MapObject *
map_assoc(MapObject *o, PyObject *key, PyObject *val)
{
    /* Return a new MapObject with key/val set; if the pair was
       already present, return `o` itself (with a new reference).
       Returns NULL with an exception set on error. */

    int32_t key_hash;
    int added_leaf = 0;
    MapNode *new_root;
    MapObject *new_o;

    key_hash = map_hash(key);
    if (key_hash == -1) {
        return NULL;
    }

    /* mutid == 0: this is an immutable (non-mutation) operation. */
    new_root = map_node_assoc(
        (MapNode *)(o->h_root),
        0, key_hash, key, val, &added_leaf,
        0);
    if (new_root == NULL) {
        return NULL;
    }

    if (new_root == o->h_root) {
        /* Nothing changed: drop the extra root ref and return `o`. */
        Py_DECREF(new_root);
        Py_INCREF(o);
        return o;
    }

    new_o = map_alloc();
    if (new_o == NULL) {
        Py_DECREF(new_root);
        return NULL;
    }

    new_o->h_root = new_root;  /* borrow */
    /* Count grows only when a genuinely new key was inserted. */
    new_o->h_count = added_leaf ? o->h_count + 1 : o->h_count;

    return new_o;
}
+
static MapObject *
map_without(MapObject *o, PyObject *key)
{
    /* Return a new MapObject without `key`.  Raises KeyError if the
       key is not present; returns NULL with an exception set on
       error. */

    int32_t key_hash = map_hash(key);
    if (key_hash == -1) {
        return NULL;
    }

    MapNode *new_root = NULL;

    /* mutid == 0: immutable (non-mutation) operation. */
    map_without_t res = map_node_without(
        (MapNode *)(o->h_root),
        0, key_hash, key,
        &new_root,
        0);

    switch (res) {
        case W_ERROR:
            return NULL;
        case W_EMPTY:
            /* Deleting the last key: return a fresh empty map. */
            return map_new();
        case W_NOT_FOUND:
            PyErr_SetObject(PyExc_KeyError, key);
            return NULL;
        case W_NEWNODE: {
            assert(new_root != NULL);

            MapObject *new_o = map_alloc();
            if (new_o == NULL) {
                Py_DECREF(new_root);
                return NULL;
            }

            new_o->h_root = new_root;  /* borrow */
            new_o->h_count = o->h_count - 1;
            assert(new_o->h_count >= 0);
            return new_o;
        }
        default:
            abort();
    }
}
+
+static map_find_t
+map_find(BaseMapObject *o, PyObject *key, PyObject **val)
+{
+ if (o->b_count == 0) {
+ return F_NOT_FOUND;
+ }
+
+ int32_t key_hash = map_hash(key);
+ if (key_hash == -1) {
+ return F_ERROR;
+ }
+
+ return map_node_find(o->b_root, 0, key_hash, key, val);
+}
+
static int
map_eq(BaseMapObject *v, BaseMapObject *w)
{
    /* Structural equality check: return 1 if equal, 0 if not,
       -1 on error.  Iterates over `v` and probes `w` for each
       key/value pair. */

    if (v == w) {
        return 1;
    }

    /* Different sizes can never be equal; this also makes the
       one-sided iteration below sufficient. */
    if (v->b_count != w->b_count) {
        return 0;
    }

    MapIteratorState iter;
    map_iter_t iter_res;
    map_find_t find_res;
    PyObject *v_key;
    PyObject *v_val;
    PyObject *w_val;

    map_iterator_init(&iter, v->b_root);

    do {
        iter_res = map_iterator_next(&iter, &v_key, &v_val);
        if (iter_res == I_ITEM) {
            find_res = map_find(w, v_key, &w_val);
            switch (find_res) {
                case F_ERROR:
                    return -1;

                case F_NOT_FOUND:
                    return 0;

                case F_FOUND: {
                    /* Values must compare equal, not be identical. */
                    int cmp = PyObject_RichCompareBool(v_val, w_val, Py_EQ);
                    if (cmp < 0) {
                        return -1;
                    }
                    if (cmp == 0) {
                        return 0;
                    }
                }
            }
        }
    } while (iter_res != I_END);

    return 1;
}
+
static Py_ssize_t
map_len(BaseMapObject *o)
{
    /* Number of key/value pairs in the map. */
    return o->b_count;
}
+
static MapObject *
map_alloc(void)
{
    /* Allocate a new, GC-tracked MapObject with all fields cleared.
       The caller is responsible for setting h_root. */

    MapObject *o;
    o = PyObject_GC_New(MapObject, &_Map_Type);
    if (o == NULL) {
        return NULL;
    }
    o->h_weakreflist = NULL;
    /* -1 means "hash not computed yet" (see map_py_hash). */
    o->h_hash = -1;
    o->h_count = 0;
    o->h_root = NULL;
    PyObject_GC_Track(o);
    return o;
}
+
static MapObject *
map_new(void)
{
    /* Create a new empty map: an allocated MapObject whose root is
       an empty Bitmap node. */

    MapObject *o = map_alloc();
    if (o == NULL) {
        return NULL;
    }

    o->h_root = map_node_bitmap_new(0, 0);
    if (o->h_root == NULL) {
        Py_DECREF(o);
        return NULL;
    }

    return o;
}
+
static PyObject *
map_dump(MapObject *self)
{
    /* Debug helper: render the whole HAMT structure of `self` into a
       new Unicode string (used by the __dump__ method). */

    _PyUnicodeWriter writer;

    _PyUnicodeWriter_Init(&writer);

    if (_map_dump_format(&writer, "HAMT(len=%zd):\n", self->h_count)) {
        goto error;
    }

    if (map_node_dump(self->h_root, &writer, 0)) {
        goto error;
    }

    return _PyUnicodeWriter_Finish(&writer);

error:
    /* The writer must be torn down explicitly on failure. */
    _PyUnicodeWriter_Dealloc(&writer);
    return NULL;
}
+
+
+/////////////////////////////////// Iterators: Shared Iterator Implementation
+
+
static int
map_baseiter_tp_clear(MapIterator *it)
{
    /* tp_clear shared by all map iterators: drop the reference to
       the map being iterated. */
    Py_CLEAR(it->mi_obj);
    return 0;
}

static void
map_baseiter_tp_dealloc(MapIterator *it)
{
    /* tp_dealloc shared by all map iterators. */
    PyObject_GC_UnTrack(it);
    (void)map_baseiter_tp_clear(it);
    PyObject_GC_Del(it);
}

static int
map_baseiter_tp_traverse(MapIterator *it, visitproc visit, void *arg)
{
    /* tp_traverse shared by all map iterators.  The nodes inside
       mi_iter are borrowed (kept alive via mi_obj), so only the map
       itself is visited. */
    Py_VISIT(it->mi_obj);
    return 0;
}

static PyObject *
map_baseiter_tp_iternext(MapIterator *it)
{
    /* tp_iternext shared by all map iterators: advance the tree
       iterator and transform the key/value pair with the iterator's
       mi_yield callback (which decides whether to yield the key,
       the value, or an (key, value) tuple). */

    PyObject *key;
    PyObject *val;
    map_iter_t res = map_iterator_next(&it->mi_iter, &key, &val);

    switch (res) {
        case I_END:
            PyErr_SetNone(PyExc_StopIteration);
            return NULL;

        case I_ITEM: {
            return (*(it->mi_yield))(key, val);
        }

        default: {
            abort();
        }
    }
}
+
static int
map_baseview_tp_clear(MapView *view)
{
    /* tp_clear shared by all map views (keys/values/items). */
    Py_CLEAR(view->mv_obj);
    Py_CLEAR(view->mv_itertype);
    return 0;
}

static void
map_baseview_tp_dealloc(MapView *view)
{
    /* tp_dealloc shared by all map views. */
    PyObject_GC_UnTrack(view);
    (void)map_baseview_tp_clear(view);
    PyObject_GC_Del(view);
}

static int
map_baseview_tp_traverse(MapView *view, visitproc visit, void *arg)
{
    /* tp_traverse shared by all map views.
       NOTE(review): mv_itertype is not visited here — presumably
       safe because the iterator types are statically allocated;
       confirm if these types ever become heap types. */
    Py_VISIT(view->mv_obj);
    return 0;
}

static Py_ssize_t
map_baseview_tp_len(MapView *view)
{
    /* len() of a view equals the length of the underlying map. */
    return view->mv_obj->h_count;
}

/* Only __len__ is provided through the mapping protocol for views. */
static PyMappingMethods MapView_as_mapping = {
    (lenfunc)map_baseview_tp_len,
};
+
static PyObject *
map_baseview_newiter(PyTypeObject *type, binaryfunc yield, MapObject *map)
{
    /* Create an iterator of the given `type` over `map`; `yield`
       converts each (key, value) pair into the object to emit. */

    MapIterator *iter = PyObject_GC_New(MapIterator, type);
    if (iter == NULL) {
        return NULL;
    }

    /* The iterator holds a strong reference to the map, which keeps
       the tree (and the borrowed nodes in mi_iter) alive. */
    Py_INCREF(map);
    iter->mi_obj = map;
    iter->mi_yield = yield;
    map_iterator_init(&iter->mi_iter, map->h_root);

    PyObject_GC_Track(iter);
    return (PyObject *)iter;
}
+
static PyObject *
map_baseview_iter(MapView *view)
{
    /* tp_iter shared by all views: build a fresh iterator using the
       view's stored iterator type and yield callback. */
    return map_baseview_newiter(
        view->mv_itertype, view->mv_yield, view->mv_obj);
}
+
static PyObject *
map_baseview_new(PyTypeObject *type, binaryfunc yield,
                 MapObject *o, PyTypeObject *itertype)
{
    /* Create a view object of the given `type` over map `o`.
       `itertype`/`yield` describe the iterator produced when the
       view itself is iterated. */

    MapView *view = PyObject_GC_New(MapView, type);
    if (view == NULL) {
        return NULL;
    }

    Py_INCREF(o);
    view->mv_obj = o;
    view->mv_yield = yield;

    Py_INCREF(itertype);
    view->mv_itertype = itertype;

    PyObject_GC_Track(view);
    return (PyObject *)view;
}
+
/* Slot initializers shared by the keys/values/items iterator types;
   they all delegate to the map_baseiter_* helpers above. */
#define ITERATOR_TYPE_SHARED_SLOTS                          \
    .tp_basicsize = sizeof(MapIterator),                    \
    .tp_itemsize = 0,                                       \
    .tp_dealloc = (destructor)map_baseiter_tp_dealloc,      \
    .tp_getattro = PyObject_GenericGetAttr,                 \
    .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,    \
    .tp_traverse = (traverseproc)map_baseiter_tp_traverse,  \
    .tp_clear = (inquiry)map_baseiter_tp_clear,             \
    .tp_iter = PyObject_SelfIter,                           \
    .tp_iternext = (iternextfunc)map_baseiter_tp_iternext,


/* Slot initializers shared by the keys/values/items view types;
   they all delegate to the map_baseview_* helpers above. */
#define VIEW_TYPE_SHARED_SLOTS                              \
    .tp_basicsize = sizeof(MapView),                        \
    .tp_itemsize = 0,                                       \
    .tp_as_mapping = &MapView_as_mapping,                   \
    .tp_dealloc = (destructor)map_baseview_tp_dealloc,      \
    .tp_getattro = PyObject_GenericGetAttr,                 \
    .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,    \
    .tp_traverse = (traverseproc)map_baseview_tp_traverse,  \
    .tp_clear = (inquiry)map_baseview_tp_clear,             \
    .tp_iter = (getiterfunc)map_baseview_iter,              \
+
+/////////////////////////////////// _MapItems_Type
+
+
/* The items() view type and its iterator type. */
PyTypeObject _MapItems_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    "items",
    VIEW_TYPE_SHARED_SLOTS
};

PyTypeObject _MapItemsIter_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    "items_iterator",
    ITERATOR_TYPE_SHARED_SLOTS
};

static PyObject *
map_iter_yield_items(PyObject *key, PyObject *val)
{
    /* Yield callback for items iteration: emit a (key, value)
       tuple (PyTuple_Pack takes its own references). */
    return PyTuple_Pack(2, key, val);
}

static PyObject *
map_new_items_view(MapObject *o)
{
    /* Create an items() view over map `o`. */
    return map_baseview_new(
        &_MapItems_Type, map_iter_yield_items, o,
        &_MapItemsIter_Type);
}
+
+
+/////////////////////////////////// _MapKeys_Type
+
+
/* The keys() view type and its iterator type. */
PyTypeObject _MapKeys_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    "keys",
    VIEW_TYPE_SHARED_SLOTS
};

PyTypeObject _MapKeysIter_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    "keys_iterator",
    ITERATOR_TYPE_SHARED_SLOTS
};

static PyObject *
map_iter_yield_keys(PyObject *key, PyObject *val)
{
    /* Yield callback for key iteration: emit a new reference to
       the key, ignoring the value. */
    Py_INCREF(key);
    return key;
}

static PyObject *
map_new_keys_iter(MapObject *o)
{
    /* Create a bare keys iterator over `o` (used by Map.__iter__). */
    return map_baseview_newiter(
        &_MapKeysIter_Type, map_iter_yield_keys, o);
}

static PyObject *
map_new_keys_view(MapObject *o)
{
    /* Create a keys() view over map `o`. */
    return map_baseview_new(
        &_MapKeys_Type, map_iter_yield_keys, o,
        &_MapKeysIter_Type);
}
+
+/////////////////////////////////// _MapValues_Type
+
+
/* The values() view type and its iterator type. */
PyTypeObject _MapValues_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    "values",
    VIEW_TYPE_SHARED_SLOTS
};

PyTypeObject _MapValuesIter_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    "values_iterator",
    ITERATOR_TYPE_SHARED_SLOTS
};

static PyObject *
map_iter_yield_values(PyObject *key, PyObject *val)
{
    /* Yield callback for value iteration: emit a new reference to
       the value, ignoring the key. */
    Py_INCREF(val);
    return val;
}

static PyObject *
map_new_values_view(MapObject *o)
{
    /* Create a values() view over map `o`. */
    return map_baseview_new(
        &_MapValues_Type, map_iter_yield_values, o,
        &_MapValuesIter_Type);
}
+
+
+/////////////////////////////////// _Map_Type
+
+
+static PyObject *
+map_dump(MapObject *self);
+
+
static PyObject *
map_tp_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
    /* tp_new for Map: always allocate a plain empty map; positional
       and keyword arguments are processed later in map_tp_init.
       NOTE(review): `type` is ignored, so the instance is always of
       _Map_Type — presumably subclassing is not supported; confirm. */
    return (PyObject*)map_new();
}
+
+
static int
map_tp_init(MapObject *self, PyObject *args, PyObject *kwds)
{
    /* tp_init for Map: Map(), Map(mapping_or_iterable), and/or
       keyword arguments.  Populates the empty map created by
       map_tp_new.  Returns 0 on success, -1 with an exception set
       on error. */

    PyObject *arg = NULL;
    uint64_t mutid = 0;

    if (!PyArg_UnpackTuple(args, "immutables.Map", 0, 1, &arg)) {
        return -1;
    }

    if (arg != NULL) {
        if (Map_Check(arg)) {
            /* Copying another Map: share its (immutable) root. */
            MapObject *other = (MapObject *)arg;

            Py_INCREF(other->h_root);
            Py_SETREF(self->h_root, other->h_root);

            self->h_count = other->h_count;
            self->h_hash = other->h_hash;
        }
        else if (MapMutation_Check(arg)) {
            PyErr_Format(
                PyExc_TypeError,
                "cannot create Maps from MapMutations");
            return -1;
        }
        else {
            /* Generic source (dict or iterable of pairs): build the
               tree in place under a fresh mutation id. */
            mutid = mutid_counter++;
            if (map_update_inplace(mutid, (BaseMapObject *)self, arg)) {
                return -1;
            }
        }
    }

    if (kwds != NULL) {
        if (!PyArg_ValidateKeywordArguments(kwds)) {
            return -1;
        }

        /* Reuse the mutation id from above if we already have one. */
        if (!mutid) {
            mutid = mutid_counter++;
        }

        if (map_update_inplace(mutid, (BaseMapObject *)self, kwds)) {
            return -1;
        }
    }

    return 0;
}
+
+
static int
map_tp_clear(BaseMapObject *self)
{
    /* tp_clear shared by Map and MapMutation: drop the root node. */
    Py_CLEAR(self->b_root);
    return 0;
}


static int
map_tp_traverse(BaseMapObject *self, visitproc visit, void *arg)
{
    /* tp_traverse shared by Map and MapMutation. */
    Py_VISIT(self->b_root);
    return 0;
}

static void
map_tp_dealloc(BaseMapObject *self)
{
    /* tp_dealloc shared by Map and MapMutation; weakrefs must be
       cleared before the object is torn down. */
    PyObject_GC_UnTrack(self);
    if (self->b_weakreflist != NULL) {
        PyObject_ClearWeakRefs((PyObject*)self);
    }
    (void)map_tp_clear(self);
    Py_TYPE(self)->tp_free(self);
}
+
+
+static PyObject *
+map_tp_richcompare(PyObject *v, PyObject *w, int op)
+{
+ if (!Map_Check(v) || !Map_Check(w) || (op != Py_EQ && op != Py_NE)) {
+ Py_RETURN_NOTIMPLEMENTED;
+ }
+
+ int res = map_eq((BaseMapObject *)v, (BaseMapObject *)w);
+ if (res < 0) {
+ return NULL;
+ }
+
+ if (op == Py_NE) {
+ res = !res;
+ }
+
+ if (res) {
+ Py_RETURN_TRUE;
+ }
+ else {
+ Py_RETURN_FALSE;
+ }
+}
+
+static int
+map_tp_contains(BaseMapObject *self, PyObject *key)
+{
+ PyObject *val;
+ map_find_t res = map_find(self, key, &val);
+ switch (res) {
+ case F_ERROR:
+ return -1;
+ case F_NOT_FOUND:
+ return 0;
+ case F_FOUND:
+ return 1;
+ default:
+ abort();
+ }
+}
+
+static PyObject *
+map_tp_subscript(BaseMapObject *self, PyObject *key)
+{
+ PyObject *val;
+ map_find_t res = map_find(self, key, &val);
+ switch (res) {
+ case F_ERROR:
+ return NULL;
+ case F_FOUND:
+ Py_INCREF(val);
+ return val;
+ case F_NOT_FOUND:
+ PyErr_SetObject(PyExc_KeyError, key);
+ return NULL;
+ default:
+ abort();
+ }
+}
+
static Py_ssize_t
map_tp_len(BaseMapObject *self)
{
    /* mp_length slot: len(map). */
    return map_len(self);
}

static PyObject *
map_tp_iter(MapObject *self)
{
    /* tp_iter slot: iterating a Map yields its keys, matching the
       behavior of dict. */
    return map_new_keys_iter(self);
}
+
static PyObject *
map_py_set(MapObject *self, PyObject *args)
{
    /* Map.set(key, value) -> new Map with the pair stored. */

    PyObject *key;
    PyObject *val;

    if (!PyArg_UnpackTuple(args, "set", 2, 2, &key, &val)) {
        return NULL;
    }

    return (PyObject *)map_assoc(self, key, val);
}

static PyObject *
map_py_get(BaseMapObject *self, PyObject *args)
{
    /* Map.get(key[, default]) -> value, default, or None. */

    PyObject *key;
    PyObject *def = NULL;

    if (!PyArg_UnpackTuple(args, "get", 1, 2, &key, &def)) {
        return NULL;
    }

    PyObject *val = NULL;
    map_find_t res = map_find(self, key, &val);
    switch (res) {
        case F_ERROR:
            return NULL;
        case F_FOUND:
            Py_INCREF(val);
            return val;
        case F_NOT_FOUND:
            /* No explicit default given: behave like dict.get. */
            if (def == NULL) {
                Py_RETURN_NONE;
            }
            Py_INCREF(def);
            return def;
        default:
            abort();
    }
}

static PyObject *
map_py_delete(MapObject *self, PyObject *key)
{
    /* Map.delete(key) -> new Map without key (KeyError if absent). */
    return (PyObject *)map_without(self, key);
}
+
static PyObject *
map_py_mutate(MapObject *self, PyObject *args)
{
    /* Map.mutate() -> a new MapMutation sharing this map's root.

       The mutation gets a fresh, unique mutation id; nodes created
       under that id can later be modified in place by the mutation
       without affecting `self`. */

    MapMutationObject *o;
    o = PyObject_GC_New(MapMutationObject, &_MapMutation_Type);
    if (o == NULL) {
        return NULL;
    }
    o->m_weakreflist = NULL;
    o->m_count = self->h_count;

    /* Share the (immutable) root; copy-on-write happens lazily. */
    Py_INCREF(self->h_root);
    o->m_root = self->h_root;

    o->m_mutid = mutid_counter++;

    PyObject_GC_Track(o);
    return (PyObject *)o;
}
+
static PyObject *
map_py_update(MapObject *self, PyObject *args, PyObject *kwds)
{
    /* Map.update([source], **kw) -> new Map merged with `source`
       and/or keyword arguments.  Returns NULL with an exception set
       on error. */

    PyObject *arg = NULL;
    MapObject *new = NULL;
    uint64_t mutid = 0;

    if (!PyArg_UnpackTuple(args, "update", 0, 1, &arg)) {
        return NULL;
    }

    if (arg != NULL) {
        mutid = mutid_counter++;
        new = map_update(mutid, self, arg);
        if (new == NULL) {
            return NULL;
        }
    }
    else {
        /* No positional source: start from `self` itself. */
        Py_INCREF(self);
        new = self;
    }

    if (kwds != NULL) {
        if (!PyArg_ValidateKeywordArguments(kwds)) {
            Py_DECREF(new);
            return NULL;
        }

        /* Reuse the same mutation id so nodes created for the
           positional update can be reused in place. */
        if (!mutid) {
            mutid = mutid_counter++;
        }

        MapObject *new2 = map_update(mutid, new, kwds);
        Py_DECREF(new);
        if (new2 == NULL) {
            return NULL;
        }
        new = new2;
    }

    return (PyObject *)new;
}
+
static PyObject *
map_py_items(MapObject *self, PyObject *args)
{
    /* Map.items() -> items view. */
    return map_new_items_view(self);
}

static PyObject *
map_py_values(MapObject *self, PyObject *args)
{
    /* Map.values() -> values view. */
    return map_new_values_view(self);
}

static PyObject *
map_py_keys(MapObject *self, PyObject *args)
{
    /* Map.keys() -> keys view. */
    return map_new_keys_view(self);
}

static PyObject *
map_py_dump(MapObject *self, PyObject *args)
{
    /* Map.__dump__() -> debug string describing the HAMT layout. */
    return map_dump(self);
}
+
+
static PyObject *
map_py_repr(BaseMapObject *m)
{
    /* tp_repr shared by Map and MapMutation:
       "immutables.Map({k1: v1, ...})" (dict-like body).
       Uses Py_ReprEnter/Py_ReprLeave to render self-referencing
       values as "{...}" instead of recursing forever. */

    Py_ssize_t i;
    _PyUnicodeWriter writer;


    i = Py_ReprEnter((PyObject *)m);
    if (i != 0) {
        /* i > 0: re-entered — cycle; i < 0: error already set. */
        return i > 0 ? PyUnicode_FromString("{...}") : NULL;
    }

    _PyUnicodeWriter_Init(&writer);

    if (MapMutation_Check(m)) {
        if (_PyUnicodeWriter_WriteASCIIString(
                &writer, "immutables.MapMutation({", 24) < 0)
        {
            goto error;
        }
    }
    else {
        if (_PyUnicodeWriter_WriteASCIIString(
                &writer, "immutables.Map({", 16) < 0)
        {
            goto error;
        }
    }

    MapIteratorState iter;
    map_iter_t iter_res;
    map_iterator_init(&iter, m->b_root);
    /* `second` is 0 only for the first item, to suppress the
       leading ", " separator. */
    int second = 0;
    do {
        PyObject *v_key;
        PyObject *v_val;

        iter_res = map_iterator_next(&iter, &v_key, &v_val);
        if (iter_res == I_ITEM) {
            if (second) {
                if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0) {
                    goto error;
                }
            }

            PyObject *s = PyObject_Repr(v_key);
            if (s == NULL) {
                goto error;
            }
            if (_PyUnicodeWriter_WriteStr(&writer, s) < 0) {
                Py_DECREF(s);
                goto error;
            }
            Py_DECREF(s);

            if (_PyUnicodeWriter_WriteASCIIString(&writer, ": ", 2) < 0) {
                goto error;
            }

            s = PyObject_Repr(v_val);
            if (s == NULL) {
                goto error;
            }
            if (_PyUnicodeWriter_WriteStr(&writer, s) < 0) {
                Py_DECREF(s);
                goto error;
            }
            Py_DECREF(s);
        }

        second = 1;
    } while (iter_res != I_END);

    if (_PyUnicodeWriter_WriteASCIIString(&writer, "})", 2) < 0) {
        goto error;
    }

    Py_ReprLeave((PyObject *)m);
    return _PyUnicodeWriter_Finish(&writer);

error:
    _PyUnicodeWriter_Dealloc(&writer);
    Py_ReprLeave((PyObject *)m);
    return NULL;
}
+
+
+static Py_uhash_t
+_shuffle_bits(Py_uhash_t h)
+{
+ return ((h ^ 89869747UL) ^ (h << 16)) * 3644798167UL;
+}
+
+
static Py_hash_t
map_py_hash(MapObject *self)
{
    /* Adapted version of frozenset.__hash__: it's important
       that Map.__hash__ is independant of key/values order.

       The result is memoized in h_hash (with -1 meaning "not yet
       computed"), so the tree is only walked once.

       Optimization idea: compute and memoize intermediate
       hash values for HAMT nodes.
    */

    if (self->h_hash != -1) {
        return self->h_hash;
    }

    Py_uhash_t hash = 0;

    MapIteratorState iter;
    map_iter_t iter_res;
    map_iterator_init(&iter, self->h_root);
    do {
        PyObject *v_key;
        PyObject *v_val;

        iter_res = map_iterator_next(&iter, &v_key, &v_val);
        if (iter_res == I_ITEM) {
            /* XOR of shuffled per-entry hashes — order-independent. */
            Py_hash_t vh = PyObject_Hash(v_key);
            if (vh == -1) {
                return -1;
            }
            hash ^= _shuffle_bits((Py_uhash_t)vh);

            vh = PyObject_Hash(v_val);
            if (vh == -1) {
                return -1;
            }
            hash ^= _shuffle_bits((Py_uhash_t)vh);
        }
    } while (iter_res != I_END);

    /* Fold in the element count and do a final avalanche pass. */
    hash ^= ((Py_uhash_t)self->h_count * 2 + 1) * 1927868237UL;

    hash ^= (hash >> 11) ^ (hash >> 25);
    hash = hash * 69069U + 907133923UL;

    self->h_hash = (Py_hash_t)hash;
    if (self->h_hash == -1) {
        /* -1 is reserved as the "uncomputed"/error sentinel. */
        self->h_hash = 1;
    }
    return self->h_hash;
}
+
static PyObject *
map_reduce(MapObject *self)
{
    /* Map.__reduce__ for pickling: return (type(self), ({...},))
       so unpickling calls the type with a plain dict of the items. */

    MapIteratorState iter;
    map_iter_t iter_res;

    PyObject *dict = PyDict_New();
    if (dict == NULL) {
        return NULL;
    }

    map_iterator_init(&iter, self->h_root);
    do {
        PyObject *key;
        PyObject *val;

        iter_res = map_iterator_next(&iter, &key, &val);
        if (iter_res == I_ITEM) {
            if (PyDict_SetItem(dict, key, val) < 0) {
                Py_DECREF(dict);
                return NULL;
            }
        }
    } while (iter_res != I_END);

    PyObject *args = PyTuple_Pack(1, dict);
    Py_DECREF(dict);
    if (args == NULL) {
        return NULL;
    }

    PyObject *tup = PyTuple_Pack(2, Py_TYPE(self), args);
    Py_DECREF(args);
    return tup;
}
+
static PyObject *
map_py_class_getitem(PyObject *type, PyObject *item)
{
    /* Map.__class_getitem__: Map[K, V] returns the type itself,
       making the class subscriptable for typing purposes. */
    Py_INCREF(type);
    return type;
}

/* Method table for the Map type. */
static PyMethodDef Map_methods[] = {
    {"set", (PyCFunction)map_py_set, METH_VARARGS, NULL},
    {"get", (PyCFunction)map_py_get, METH_VARARGS, NULL},
    {"delete", (PyCFunction)map_py_delete, METH_O, NULL},
    {"mutate", (PyCFunction)map_py_mutate, METH_NOARGS, NULL},
    {"items", (PyCFunction)map_py_items, METH_NOARGS, NULL},
    {"keys", (PyCFunction)map_py_keys, METH_NOARGS, NULL},
    {"values", (PyCFunction)map_py_values, METH_NOARGS, NULL},
    {"update", (PyCFunction)map_py_update, METH_VARARGS | METH_KEYWORDS, NULL},
    {"__reduce__", (PyCFunction)map_reduce, METH_NOARGS, NULL},
    {"__dump__", (PyCFunction)map_py_dump, METH_NOARGS, NULL},
    {
        "__class_getitem__",
        (PyCFunction)map_py_class_getitem,
        METH_O|METH_CLASS,
        NULL
    },
    {NULL, NULL}
};
+
/* Sequence protocol: only `in` (sq_contains) is supported. */
static PySequenceMethods Map_as_sequence = {
    0,                                  /* sq_length */
    0,                                  /* sq_concat */
    0,                                  /* sq_repeat */
    0,                                  /* sq_item */
    0,                                  /* sq_slice */
    0,                                  /* sq_ass_item */
    0,                                  /* sq_ass_slice */
    (objobjproc)map_tp_contains,        /* sq_contains */
    0,                                  /* sq_inplace_concat */
    0,                                  /* sq_inplace_repeat */
};

/* Mapping protocol: len(map) and map[key]. */
static PyMappingMethods Map_as_mapping = {
    (lenfunc)map_tp_len,                /* mp_length */
    (binaryfunc)map_tp_subscript,       /* mp_subscript */
};
+
/* The immutables.Map type object. */
PyTypeObject _Map_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    "immutables._map.Map",
    sizeof(MapObject),
    .tp_methods = Map_methods,
    .tp_as_mapping = &Map_as_mapping,
    .tp_as_sequence = &Map_as_sequence,
    .tp_iter = (getiterfunc)map_tp_iter,
    .tp_dealloc = (destructor)map_tp_dealloc,
    .tp_getattro = PyObject_GenericGetAttr,
    .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,
    .tp_richcompare = map_tp_richcompare,
    .tp_traverse = (traverseproc)map_tp_traverse,
    .tp_clear = (inquiry)map_tp_clear,
    .tp_new = map_tp_new,
    .tp_init = (initproc)map_tp_init,
    .tp_weaklistoffset = offsetof(MapObject, h_weakreflist),
    .tp_hash = (hashfunc)map_py_hash,
    .tp_repr = (reprfunc)map_py_repr,
};
+
+
+/////////////////////////////////// MapMutation
+
+
static int
map_node_update_from_map(uint64_t mutid,
                         MapObject *map,
                         MapNode *root, Py_ssize_t count,
                         MapNode **new_root, Py_ssize_t *new_count)
{
    /* Merge all pairs from `map` into the tree rooted at `root`
       (which has `count` entries).  On success returns 0 and sets
       *new_root (new reference) / *new_count; returns -1 with an
       exception set on error. */

    assert(Map_Check(map));

    MapIteratorState iter;
    map_iter_t iter_res;

    MapNode *last_root;
    Py_ssize_t last_count;

    Py_INCREF(root);
    last_root = root;
    last_count = count;

    map_iterator_init(&iter, map->h_root);
    do {
        PyObject *key;
        PyObject *val;
        int32_t key_hash;
        int added_leaf;

        iter_res = map_iterator_next(&iter, &key, &val);
        if (iter_res == I_ITEM) {
            key_hash = map_hash(key);
            if (key_hash == -1) {
                goto err;
            }

            MapNode *iter_root = map_node_assoc(
                last_root,
                0, key_hash, key, val, &added_leaf,
                mutid);

            if (iter_root == NULL) {
                goto err;
            }

            if (added_leaf) {
                last_count++;
            }

            /* Replace the working root, dropping the old one. */
            Py_SETREF(last_root, iter_root);
        }
    } while (iter_res != I_END);

    *new_root = last_root;
    *new_count = last_count;

    return 0;

err:
    Py_DECREF(last_root);
    return -1;
}
+
+
static int
map_node_update_from_dict(uint64_t mutid,
                          PyObject *dct,
                          MapNode *root, Py_ssize_t count,
                          MapNode **new_root, Py_ssize_t *new_count)
{
    /* Merge all pairs from dict `dct` into the tree rooted at
       `root`.  Same contract as map_node_update_from_map. */

    assert(PyDict_Check(dct));

    /* Iterating the dict yields its keys. */
    PyObject *it = PyObject_GetIter(dct);
    if (it == NULL) {
        return -1;
    }

    MapNode *last_root;
    Py_ssize_t last_count;

    Py_INCREF(root);
    last_root = root;
    last_count = count;

    PyObject *key;

    while ((key = PyIter_Next(it))) {
        PyObject *val;
        int added_leaf;
        int32_t key_hash;

        key_hash = map_hash(key);
        if (key_hash == -1) {
            Py_DECREF(key);
            goto err;
        }

        /* Borrowed reference (valid while `dct` holds the entry). */
        val = PyDict_GetItemWithError(dct, key);
        if (val == NULL) {
            /* NOTE(review): PyDict_GetItemWithError returns NULL
               without setting an exception when the key is simply
               absent; since `key` came from iterating `dct` this is
               presumably unreachable unless the dict is mutated
               concurrently — confirm. */
            Py_DECREF(key);
            goto err;
        }

        MapNode *iter_root = map_node_assoc(
            last_root,
            0, key_hash, key, val, &added_leaf,
            mutid);

        Py_DECREF(key);

        if (iter_root == NULL) {
            goto err;
        }

        if (added_leaf) {
            last_count++;
        }

        Py_SETREF(last_root, iter_root);
    }

    /* PyIter_Next returns NULL both on exhaustion and on error. */
    if (key == NULL && PyErr_Occurred()) {
        goto err;
    }

    Py_DECREF(it);

    *new_root = last_root;
    *new_count = last_count;

    return 0;

err:
    Py_DECREF(it);
    Py_DECREF(last_root);
    return -1;
}
+
+
static int
map_node_update_from_seq(uint64_t mutid,
                         PyObject *seq,
                         MapNode *root, Py_ssize_t count,
                         MapNode **new_root, Py_ssize_t *new_count)
{
    /* Merge pairs from an iterable of 2-element sequences (the
       dict-update protocol) into the tree rooted at `root`.
       Same contract as map_node_update_from_map. */

    PyObject *it;
    Py_ssize_t i;
    PyObject *item = NULL;
    PyObject *fast = NULL;

    MapNode *last_root;
    Py_ssize_t last_count;

    it = PyObject_GetIter(seq);
    if (it == NULL) {
        return -1;
    }

    Py_INCREF(root);
    last_root = root;
    last_count = count;

    /* `i` is tracked only for error messages. */
    for (i = 0; ; i++) {
        PyObject *key, *val;
        Py_ssize_t n;
        int32_t key_hash;
        int added_leaf;

        item = PyIter_Next(it);
        if (item == NULL) {
            if (PyErr_Occurred()) {
                goto err;
            }
            break;
        }

        /* Normalize the item to a list/tuple so it can be indexed. */
        fast = PySequence_Fast(item, "");
        if (fast == NULL) {
            if (PyErr_ExceptionMatches(PyExc_TypeError))
                PyErr_Format(PyExc_TypeError,
                    "cannot convert map update "
                    "sequence element #%zd to a sequence",
                    i);
            goto err;
        }

        n = PySequence_Fast_GET_SIZE(fast);
        if (n != 2) {
            PyErr_Format(PyExc_ValueError,
                         "map update sequence element #%zd "
                         "has length %zd; 2 is required",
                         i, n);
            goto err;
        }

        /* GET_ITEM returns borrowed refs; take strong refs while
           we still need key/val past operations that may run
           arbitrary Python code. */
        key = PySequence_Fast_GET_ITEM(fast, 0);
        val = PySequence_Fast_GET_ITEM(fast, 1);
        Py_INCREF(key);
        Py_INCREF(val);

        key_hash = map_hash(key);
        if (key_hash == -1) {
            Py_DECREF(key);
            Py_DECREF(val);
            goto err;
        }

        MapNode *iter_root = map_node_assoc(
            last_root,
            0, key_hash, key, val, &added_leaf,
            mutid);

        Py_DECREF(key);
        Py_DECREF(val);

        if (iter_root == NULL) {
            goto err;
        }

        if (added_leaf) {
            last_count++;
        }

        Py_SETREF(last_root, iter_root);

        Py_DECREF(fast);
        Py_DECREF(item);
    }

    Py_DECREF(it);

    *new_root = last_root;
    *new_count = last_count;

    return 0;

err:
    Py_DECREF(last_root);
    Py_XDECREF(item);
    Py_XDECREF(fast);
    Py_DECREF(it);
    return -1;
}
+
+
static int
map_node_update(uint64_t mutid,
                PyObject *src,
                MapNode *root, Py_ssize_t count,
                MapNode **new_root, Py_ssize_t *new_count)
{
    /* Dispatch a bulk update of the tree rooted at `root` based on the
       type of `src`: another Map, a dict, or a generic iterable of
       (key, value) pairs.  Returns 0 on success, -1 on error. */
    if (Map_Check(src)) {
        return map_node_update_from_map(
            mutid, (MapObject *)src, root, count, new_root, new_count);
    }
    else if (PyDict_Check(src)) {
        return map_node_update_from_dict(
            mutid, src, root, count, new_root, new_count);
    }
    else {
        return map_node_update_from_seq(
            mutid, src, root, count, new_root, new_count);
    }
}
+
+
static int
map_update_inplace(uint64_t mutid, BaseMapObject *o, PyObject *src)
{
    /* Merge `src` into `o`, replacing o's root and count in place.
       Returns 0 on success, -1 with an exception set on error. */
    MapNode *new_root = NULL;
    Py_ssize_t new_count;

    int ret = map_node_update(
        mutid, src,
        o->b_root, o->b_count,
        &new_root, &new_count);

    if (ret) {
        return -1;
    }

    assert(new_root);

    /* Py_SETREF steals the reference produced by map_node_update. */
    Py_SETREF(o->b_root, new_root);
    o->b_count = new_count;

    return 0;
}
+
+
static MapObject *
map_update(uint64_t mutid, MapObject *o, PyObject *src)
{
    /* Return a new MapObject equal to `o` merged with the items of
       `src`; `o` itself is left untouched.  NULL on error. */
    MapNode *new_root = NULL;
    Py_ssize_t new_count;

    int ret = map_node_update(
        mutid, src,
        o->h_root, o->h_count,
        &new_root, &new_count);

    if (ret) {
        return NULL;
    }

    assert(new_root);

    MapObject *new = map_alloc();
    if (new == NULL) {
        Py_DECREF(new_root);
        return NULL;
    }

    /* Transfer ownership of new_root to the freshly allocated map. */
    Py_XSETREF(new->h_root, new_root);
    new->h_count = new_count;

    return new;
}
+
+static int
+mapmut_check_finalized(MapMutationObject *o)
+{
+ if (o->m_mutid == 0) {
+ PyErr_Format(
+ PyExc_ValueError,
+ "mutation %R has been finished",
+ o, NULL);
+ return -1;
+ }
+
+ return 0;
+}
+
static int
mapmut_delete(MapMutationObject *o, PyObject *key, int32_t key_hash)
{
    /* Remove `key` from the mutation in place.  Returns 0 on success,
       -1 with KeyError (or another exception) set on failure. */
    MapNode *new_root = NULL;

    assert(key_hash != -1);
    map_without_t res = map_node_without(
        (MapNode *)(o->m_root),
        0, key_hash, key,
        &new_root,
        o->m_mutid);

    switch (res) {
        case W_ERROR:
            return -1;

        case W_EMPTY:
            /* Last item removed: install a fresh empty bitmap root. */
            new_root = map_node_bitmap_new(0, o->m_mutid);
            if (new_root == NULL) {
                return -1;
            }
            Py_SETREF(o->m_root, new_root);
            o->m_count = 0;
            return 0;

        case W_NOT_FOUND:
            PyErr_SetObject(PyExc_KeyError, key);
            return -1;

        case W_NEWNODE: {
            assert(new_root != NULL);
            Py_SETREF(o->m_root, new_root);
            o->m_count--;
            return 0;
        }

        default:
            /* map_node_without returned an unknown code. */
            abort();
    }
}
+
static int
mapmut_set(MapMutationObject *o, PyObject *key, int32_t key_hash,
           PyObject *val)
{
    /* Set key -> val on the mutation in place.  Returns 0 / -1. */
    int added_leaf = 0;

    assert(key_hash != -1);
    MapNode *new_root = map_node_assoc(
        (MapNode *)(o->m_root),
        0, key_hash, key, val, &added_leaf,
        o->m_mutid);
    if (new_root == NULL) {
        return -1;
    }

    if (added_leaf) {
        o->m_count++;
    }

    if (new_root == o->m_root) {
        /* The root was mutated in place; drop the extra reference
           map_node_assoc returned. */
        Py_DECREF(new_root);
        return 0;
    }

    Py_SETREF(o->m_root, new_root);
    return 0;
}
+
static int
mapmut_finish(MapMutationObject *o)
{
    /* Mark the mutation finished: mutid 0 disables any further
       in-place edits of the nodes this mutation created. */
    o->m_mutid = 0;
    return 0;
}
+
static PyObject *
mapmut_py_set(MapMutationObject *o, PyObject *args)
{
    /* MapMutation.set(key, val) -> None */
    PyObject *key;
    PyObject *val;

    if (!PyArg_UnpackTuple(args, "set", 2, 2, &key, &val)) {
        return NULL;
    }

    if (mapmut_check_finalized(o)) {
        return NULL;
    }

    int32_t key_hash = map_hash(key);
    if (key_hash == -1) {
        return NULL;
    }

    if (mapmut_set(o, key, key_hash, val)) {
        return NULL;
    }

    Py_RETURN_NONE;
}
+
static PyObject *
mapmut_tp_richcompare(PyObject *v, PyObject *w, int op)
{
    /* Only == and != between two MapMutation objects are supported;
       everything else defers to the other operand. */
    if (!MapMutation_Check(v) || !MapMutation_Check(w) ||
        (op != Py_EQ && op != Py_NE))
    {
        Py_RETURN_NOTIMPLEMENTED;
    }

    int res = map_eq((BaseMapObject *)v, (BaseMapObject *)w);
    if (res < 0) {
        return NULL;
    }

    if (op == Py_NE) {
        res = !res;
    }

    if (res) {
        Py_RETURN_TRUE;
    }
    else {
        Py_RETURN_FALSE;
    }
}
+
static PyObject *
mapmut_py_update(MapMutationObject *self, PyObject *args, PyObject *kwds)
{
    /* MapMutation.update(col=None, **kw) -> None

       The positional collection is merged first, then the keyword
       items, so keywords win on duplicate keys. */
    PyObject *arg = NULL;

    if (!PyArg_UnpackTuple(args, "update", 0, 1, &arg)) {
        return NULL;
    }

    if (mapmut_check_finalized(self)) {
        return NULL;
    }

    if (arg != NULL) {
        if (map_update_inplace(self->m_mutid, (BaseMapObject *)self, arg)) {
            return NULL;
        }
    }

    if (kwds != NULL) {
        /* Reject non-string keyword names before merging the dict. */
        if (!PyArg_ValidateKeywordArguments(kwds)) {
            return NULL;
        }

        if (map_update_inplace(self->m_mutid, (BaseMapObject *)self, kwds)) {
            return NULL;
        }
    }

    Py_RETURN_NONE;
}
+
+
static PyObject *
mapmut_py_finish(MapMutationObject *self, PyObject *args)
{
    /* MapMutation.finish() -> Map

       Invalidates the mutation first (so shared nodes can no longer be
       edited in place), then returns an immutable Map sharing its tree. */
    if (mapmut_finish(self)) {
        return NULL;
    }

    MapObject *o = map_alloc();
    if (o == NULL) {
        return NULL;
    }

    Py_INCREF(self->m_root);
    o->h_root = self->m_root;
    o->h_count = self->m_count;

    return (PyObject *)o;
}
+
static PyObject *
mapmut_py_enter(MapMutationObject *self, PyObject *args)
{
    /* __enter__: the mutation itself is the context object. */
    Py_INCREF(self);
    return (PyObject *)self;
}
+
static PyObject *
mapmut_py_exit(MapMutationObject *self, PyObject *args)
{
    /* __exit__: finish the mutation; returning False never suppresses
       an in-flight exception. */
    if (mapmut_finish(self)) {
        return NULL;
    }
    Py_RETURN_FALSE;
}
+
static int
mapmut_tp_ass_sub(MapMutationObject *self, PyObject *key, PyObject *val)
{
    /* mp_ass_subscript: `mm[key] = val` sets; `del mm[key]` arrives
       with val == NULL and deletes. */
    if (mapmut_check_finalized(self)) {
        return -1;
    }

    int32_t key_hash = map_hash(key);
    if (key_hash == -1) {
        return -1;
    }

    if (val == NULL) {
        return mapmut_delete(self, key, key_hash);
    }
    else {
        return mapmut_set(self, key, key_hash, val);
    }
}
+
static PyObject *
mapmut_py_pop(MapMutationObject *self, PyObject *args)
{
    /* MapMutation.pop(key[, default])

       Removes `key` and returns its value; returns `default` (if given)
       or raises KeyError when the key is absent. */
    PyObject *key, *deflt = NULL, *val = NULL;

    if(!PyArg_UnpackTuple(args, "pop", 1, 2, &key, &deflt)) {
        return NULL;
    }

    if (mapmut_check_finalized(self)) {
        return NULL;
    }

    /* Empty mutation: skip hashing entirely. */
    if (!self->m_count) {
        goto not_found;
    }

    int32_t key_hash = map_hash(key);
    if (key_hash == -1) {
        return NULL;
    }

    map_find_t find_res = map_node_find(self->m_root, 0, key_hash, key, &val);

    switch (find_res) {
        case F_ERROR:
            return NULL;

        case F_NOT_FOUND:
            goto not_found;

        case F_FOUND:
            break;

        default:
            abort();
    }

    /* `val` is borrowed from the tree; own it before the delete below
       can release the node that holds it. */
    Py_INCREF(val);

    if (mapmut_delete(self, key, key_hash)) {
        Py_DECREF(val);
        return NULL;
    }

    return val;

not_found:
    if (deflt) {
        Py_INCREF(deflt);
        return deflt;
    }

    PyErr_SetObject(PyExc_KeyError, key);
    return NULL;
}
+
+
static PyMethodDef MapMutation_methods[] = {
    /* `get` is shared with the Map type; the rest are mutation-only. */
    {"set", (PyCFunction)mapmut_py_set, METH_VARARGS, NULL},
    {"get", (PyCFunction)map_py_get, METH_VARARGS, NULL},
    {"pop", (PyCFunction)mapmut_py_pop, METH_VARARGS, NULL},
    {"finish", (PyCFunction)mapmut_py_finish, METH_NOARGS, NULL},
    {"update", (PyCFunction)mapmut_py_update,
               METH_VARARGS | METH_KEYWORDS, NULL},
    {"__enter__", (PyCFunction)mapmut_py_enter, METH_NOARGS, NULL},
    {"__exit__", (PyCFunction)mapmut_py_exit, METH_VARARGS, NULL},
    {NULL, NULL}
};
+
/* Only the `in` operator is provided; containment is shared with Map. */
static PySequenceMethods MapMutation_as_sequence = {
    0,                                  /* sq_length */
    0,                                  /* sq_concat */
    0,                                  /* sq_repeat */
    0,                                  /* sq_item */
    0,                                  /* sq_slice */
    0,                                  /* sq_ass_item */
    0,                                  /* sq_ass_slice */
    (objobjproc)map_tp_contains,        /* sq_contains */
    0,                                  /* sq_inplace_concat */
    0,                                  /* sq_inplace_repeat */
};
+
static PyMappingMethods MapMutation_as_mapping = {
    (lenfunc)map_tp_len,                /* mp_length */
    (binaryfunc)map_tp_subscript,       /* mp_subscript */
    (objobjargproc)mapmut_tp_ass_sub,   /* mp_ass_subscript */
};
+
/* Type object for MapMutation, the mutable companion of Map. */
PyTypeObject _MapMutation_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    "immutables._map.MapMutation",
    sizeof(MapMutationObject),
    .tp_methods = MapMutation_methods,
    .tp_as_mapping = &MapMutation_as_mapping,
    .tp_as_sequence = &MapMutation_as_sequence,
    .tp_dealloc = (destructor)map_tp_dealloc,
    .tp_getattro = PyObject_GenericGetAttr,
    .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,
    .tp_traverse = (traverseproc)map_tp_traverse,
    .tp_richcompare = mapmut_tp_richcompare,
    .tp_clear = (inquiry)map_tp_clear,
    .tp_weaklistoffset = offsetof(MapMutationObject, m_weakreflist),
    .tp_repr = (reprfunc)map_py_repr,
    .tp_hash = PyObject_HashNotImplemented,
};
+
+
+/////////////////////////////////// Tree Node Types
+
+
/* Internal HAMT node types.  Array nodes are fixed-size; bitmap and
   collision nodes are variable-size (tp_itemsize = sizeof(PyObject *),
   with one trailing slot folded out of tp_basicsize). */

PyTypeObject _Map_ArrayNode_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    "map_array_node",
    sizeof(MapNode_Array),
    0,
    .tp_dealloc = (destructor)map_node_array_dealloc,
    .tp_getattro = PyObject_GenericGetAttr,
    .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,
    .tp_traverse = (traverseproc)map_node_array_traverse,
    .tp_free = PyObject_GC_Del,
    .tp_hash = PyObject_HashNotImplemented,
};

PyTypeObject _Map_BitmapNode_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    "map_bitmap_node",
    sizeof(MapNode_Bitmap) - sizeof(PyObject *),
    sizeof(PyObject *),
    .tp_dealloc = (destructor)map_node_bitmap_dealloc,
    .tp_getattro = PyObject_GenericGetAttr,
    .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,
    .tp_traverse = (traverseproc)map_node_bitmap_traverse,
    .tp_free = PyObject_GC_Del,
    .tp_hash = PyObject_HashNotImplemented,
};

PyTypeObject _Map_CollisionNode_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    "map_collision_node",
    sizeof(MapNode_Collision) - sizeof(PyObject *),
    sizeof(PyObject *),
    .tp_dealloc = (destructor)map_node_collision_dealloc,
    .tp_getattro = PyObject_GenericGetAttr,
    .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,
    .tp_traverse = (traverseproc)map_node_collision_traverse,
    .tp_free = PyObject_GC_Del,
    .tp_hash = PyObject_HashNotImplemented,
};
+
+
static void
module_free(void *m)
{
    /* Release the module-level _empty_bitmap_node singleton. */
    Py_CLEAR(_empty_bitmap_node);
}
+
+
/* Module definition for immutables._map. */
static struct PyModuleDef _mapmodule = {
    PyModuleDef_HEAD_INIT,      /* m_base */
    "_map",                     /* m_name */
    NULL,                       /* m_doc */
    -1,                         /* m_size */
    NULL,                       /* m_methods */
    NULL,                       /* m_slots */
    NULL,                       /* m_traverse */
    NULL,                       /* m_clear */
    module_free,                /* m_free */
};
+
+
+PyMODINIT_FUNC
+PyInit__map(void)
+{
+ PyObject *m = PyModule_Create(&_mapmodule);
+
+ if ((PyType_Ready(&_Map_Type) < 0) ||
+ (PyType_Ready(&_MapMutation_Type) < 0) ||
+ (PyType_Ready(&_Map_ArrayNode_Type) < 0) ||
+ (PyType_Ready(&_Map_BitmapNode_Type) < 0) ||
+ (PyType_Ready(&_Map_CollisionNode_Type) < 0) ||
+ (PyType_Ready(&_MapKeys_Type) < 0) ||
+ (PyType_Ready(&_MapValues_Type) < 0) ||
+ (PyType_Ready(&_MapItems_Type) < 0) ||
+ (PyType_Ready(&_MapKeysIter_Type) < 0) ||
+ (PyType_Ready(&_MapValuesIter_Type) < 0) ||
+ (PyType_Ready(&_MapItemsIter_Type) < 0))
+ {
+ return 0;
+ }
+
+ Py_INCREF(&_Map_Type);
+ if (PyModule_AddObject(m, "Map", (PyObject *)&_Map_Type) < 0) {
+ Py_DECREF(&_Map_Type);
+ return NULL;
+ }
+
+ return m;
+}
diff --git a/immutables/_map.h b/immutables/_map.h
new file mode 100644
index 0000000..28142ce
--- /dev/null
+++ b/immutables/_map.h
@@ -0,0 +1,107 @@
+#ifndef IMMUTABLES_MAP_H
+#define IMMUTABLES_MAP_H
+
+#include <stdint.h>
+#include "Python.h"
+
+#define _Py_HAMT_MAX_TREE_DEPTH 8
+
+
+#define Map_Check(o) (Py_TYPE(o) == &_Map_Type)
+#define MapMutation_Check(o) (Py_TYPE(o) == &_MapMutation_Type)
+
+
+/* Abstract tree node. */
+typedef struct {
+ PyObject_HEAD
+} MapNode;
+
+
+#define _MapCommonFields(pref) \
+ PyObject_HEAD \
+ MapNode *pref##_root; \
+ PyObject *pref##_weakreflist; \
+ Py_ssize_t pref##_count;
+
+
+/* Base mapping struct; used in methods shared between
+ MapObject and MapMutationObject types. */
+typedef struct {
+ _MapCommonFields(b)
+} BaseMapObject;
+
+
+/* An HAMT immutable mapping collection. */
+typedef struct {
+ _MapCommonFields(h)
+ Py_hash_t h_hash;
+} MapObject;
+
+
+/* MapMutation object (returned from `map.mutate()`.) */
+typedef struct {
+ _MapCommonFields(m)
+ uint64_t m_mutid;
+} MapMutationObject;
+
+
+/* A struct to hold the state of depth-first traverse of the tree.
+
+ HAMT is an immutable collection. Iterators will hold a strong reference
+ to it, and every node in the HAMT has strong references to its children.
+
+ So for iterators, we can implement zero allocations and zero reference
+ inc/dec depth-first iteration.
+
+ - i_nodes: an array of seven pointers to tree nodes
+ - i_level: the current node in i_nodes
+ - i_pos: an array of positions within nodes in i_nodes.
+*/
+typedef struct {
+ MapNode *i_nodes[_Py_HAMT_MAX_TREE_DEPTH];
+ Py_ssize_t i_pos[_Py_HAMT_MAX_TREE_DEPTH];
+ int8_t i_level;
+} MapIteratorState;
+
+
+/* Base iterator object.
+
+ Contains the iteration state, a pointer to the HAMT tree,
+ and a pointer to the 'yield function'. The latter is a simple
+ function that returns a key/value tuple for the 'Items' iterator,
+ just a key for the 'Keys' iterator, and a value for the 'Values'
+ iterator.
+*/
+
+typedef struct {
+ PyObject_HEAD
+ MapObject *mv_obj;
+ binaryfunc mv_yield;
+ PyTypeObject *mv_itertype;
+} MapView;
+
+typedef struct {
+ PyObject_HEAD
+ MapObject *mi_obj;
+ binaryfunc mi_yield;
+ MapIteratorState mi_iter;
+} MapIterator;
+
+
+/* PyTypes */
+
+
+PyTypeObject _Map_Type;
+PyTypeObject _MapMutation_Type;
+PyTypeObject _Map_ArrayNode_Type;
+PyTypeObject _Map_BitmapNode_Type;
+PyTypeObject _Map_CollisionNode_Type;
+PyTypeObject _MapKeys_Type;
+PyTypeObject _MapValues_Type;
+PyTypeObject _MapItems_Type;
+PyTypeObject _MapKeysIter_Type;
+PyTypeObject _MapValuesIter_Type;
+PyTypeObject _MapItemsIter_Type;
+
+
+#endif
diff --git a/immutables/_map.pyi b/immutables/_map.pyi
new file mode 100644
index 0000000..4590cd4
--- /dev/null
+++ b/immutables/_map.pyi
@@ -0,0 +1,73 @@
+from typing import Any
+from typing import Dict
+from typing import Generic
+from typing import Iterable
+from typing import Iterator
+from typing import Mapping
+from typing import Optional
+from typing import Tuple
+from typing import Type
+from typing import Union
+from typing import overload
+
+from ._protocols import IterableItems
+from ._protocols import MapItems
+from ._protocols import MapKeys
+from ._protocols import MapMutation
+from ._protocols import MapValues
+from ._protocols import HT
+from ._protocols import KT
+from ._protocols import T
+from ._protocols import VT_co
+
+
+class Map(Mapping[KT, VT_co]):
+ @overload
+ def __init__(self) -> None: ...
+ @overload
+ def __init__(self: Map[str, VT_co], **kw: VT_co) -> None: ...
+ @overload
+ def __init__(
+ self, __col: Union[IterableItems[KT, VT_co], Iterable[Tuple[KT, VT_co]]]
+ ) -> None: ...
+ @overload
+ def __init__(
+ self: Map[Union[KT, str], VT_co],
+ __col: Union[IterableItems[KT, VT_co], Iterable[Tuple[KT, VT_co]]],
+ **kw: VT_co
+ ) -> None: ...
+ def __reduce__(self) -> Tuple[Type[Map[KT, VT_co]], Tuple[Dict[KT, VT_co]]]: ...
+ def __len__(self) -> int: ...
+ def __eq__(self, other: Any) -> bool: ...
+ @overload
+ def update(
+ self,
+ __col: Union[IterableItems[KT, VT_co], Iterable[Tuple[KT, VT_co]]]
+ ) -> Map[KT, VT_co]: ...
+ @overload
+ def update(
+ self: Map[Union[HT, str], Any],
+ __col: Union[IterableItems[KT, VT_co], Iterable[Tuple[KT, VT_co]]],
+ **kw: VT_co # type: ignore[misc]
+ ) -> Map[KT, VT_co]: ...
+ @overload
+ def update(
+ self: Map[Union[HT, str], Any],
+ **kw: VT_co # type: ignore[misc]
+ ) -> Map[KT, VT_co]: ...
+ def mutate(self) -> MapMutation[KT, VT_co]: ...
+ def set(self, key: KT, val: VT_co) -> Map[KT, VT_co]: ... # type: ignore[misc]
+ def delete(self, key: KT) -> Map[KT, VT_co]: ...
+ @overload
+ def get(self, key: KT) -> Optional[VT_co]: ...
+ @overload
+ def get(self, key: KT, default: Union[VT_co, T]) -> Union[VT_co, T]: ...
+ def __getitem__(self, key: KT) -> VT_co: ...
+ def __contains__(self, key: Any) -> bool: ...
+ def __iter__(self) -> Iterator[KT]: ...
+ def keys(self) -> MapKeys[KT]: ... # type: ignore[override]
+ def values(self) -> MapValues[VT_co]: ... # type: ignore[override]
+ def items(self) -> MapItems[KT, VT_co]: ... # type: ignore[override]
+ def __hash__(self) -> int: ...
+ def __dump__(self) -> str: ...
+ def __class_getitem__(cls, item: Any) -> Type[Map[Any, Any]]: ...
diff --git a/immutables/_protocols.py b/immutables/_protocols.py
new file mode 100644
index 0000000..de87d23
--- /dev/null
+++ b/immutables/_protocols.py
@@ -0,0 +1,85 @@
+import sys
+from typing import Any
+from typing import Hashable
+from typing import Iterable
+from typing import Iterator
+from typing import NoReturn
+from typing import Optional
+from typing import Tuple
+from typing import TypeVar
+from typing import Union
+from typing import overload
+
+if sys.version_info >= (3, 8):
+ from typing import Protocol
+ from typing import TYPE_CHECKING
+else:
+ from typing_extensions import Protocol
+ from typing_extensions import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._map import Map
+
+HT = TypeVar('HT', bound=Hashable)
+KT = TypeVar('KT', bound=Hashable)
+KT_co = TypeVar('KT_co', covariant=True)
+MM = TypeVar('MM', bound='MapMutation[Any, Any]')
+T = TypeVar('T')
+VT = TypeVar('VT')
+VT_co = TypeVar('VT_co', covariant=True)
+
+
+class MapKeys(Protocol[KT_co]):
+ def __len__(self) -> int: ...
+ def __iter__(self) -> Iterator[KT_co]: ...
+
+
+class MapValues(Protocol[VT_co]):
+ def __len__(self) -> int: ...
+ def __iter__(self) -> Iterator[VT_co]: ...
+
+
+class MapItems(Protocol[KT_co, VT_co]):
+ def __len__(self) -> int: ...
+ def __iter__(self) -> Iterator[Tuple[KT_co, VT_co]]: ...
+
+
+class IterableItems(Protocol[KT_co, VT_co]):
+ def items(self) -> Iterable[Tuple[KT_co, VT_co]]: ...
+
+
+class MapMutation(Protocol[KT, VT]):
+ def set(self, key: KT, val: VT) -> None: ...
+ def __enter__(self: MM) -> MM: ...
+ def __exit__(self, *exc: Any) -> bool: ...
+ def __iter__(self) -> NoReturn: ...
+ def __delitem__(self, key: KT) -> None: ...
+ def __setitem__(self, key: KT, val: VT) -> None: ...
+ @overload
+ def pop(self, __key: KT) -> VT: ...
+ @overload
+ def pop(self, __key: KT, __default: T) -> Union[VT, T]: ...
+ @overload
+ def get(self, key: KT) -> Optional[VT]: ...
+ @overload
+ def get(self, key: KT, default: Union[VT, T]) -> Union[VT, T]: ...
+ def __getitem__(self, key: KT) -> VT: ...
+ def __contains__(self, key: object) -> bool: ...
+
+ @overload
+ def update(
+ self,
+ __col: Union[IterableItems[KT, VT], Iterable[Tuple[KT, VT]]]
+ ) -> None: ...
+
+ @overload
+ def update(
+ self: 'MapMutation[Union[HT, str], Any]',
+ __col: Union[IterableItems[KT, VT], Iterable[Tuple[KT, VT]]],
+ **kw: VT
+ ) -> None: ...
+ @overload
+ def update(self: 'MapMutation[Union[HT, str], Any]', **kw: VT) -> None: ...
+ def finish(self) -> 'Map[KT, VT]': ...
+ def __len__(self) -> int: ...
+ def __eq__(self, other: Any) -> bool: ...
diff --git a/immutables/_testutils.py b/immutables/_testutils.py
new file mode 100644
index 0000000..3f174b2
--- /dev/null
+++ b/immutables/_testutils.py
@@ -0,0 +1,80 @@
class HashKey:
    """Test helper: a key with a fully controlled hash value.

    The class-level ``_crasher`` (a HashKeyCrasher installed via its
    context manager) can make __hash__/__eq__/__repr__ raise on demand.
    """

    _crasher = None

    def __init__(self, hash, name, *, error_on_eq_to=None):
        assert hash != -1
        self.name = name
        self.hash = hash
        self.error_on_eq_to = error_on_eq_to

    def __repr__(self):
        if self._crasher is not None and self._crasher.error_on_repr:
            raise ReprError
        return '<Key name:{} hash:{}>'.format(self.name, self.hash)

    def __hash__(self):
        if self._crasher is not None and self._crasher.error_on_hash:
            raise HashingError

        return self.hash

    def __eq__(self, other):
        if not isinstance(other, HashKey):
            return NotImplemented

        if self._crasher is not None and self._crasher.error_on_eq:
            raise EqError

        # Comparing against a designated "poison" object raises ValueError.
        if self.error_on_eq_to is not None and self.error_on_eq_to is other:
            raise ValueError('cannot compare {!r} to {!r}'.format(self, other))
        if other.error_on_eq_to is not None and other.error_on_eq_to is self:
            raise ValueError('cannot compare {!r} to {!r}'.format(other, self))

        return (self.name, self.hash) == (other.name, other.hash)
+
+
class KeyStr(str):
    """str subclass whose hash/eq/repr honor the active HashKeyCrasher.

    Fixes the previous ``__repr__``, which had a bogus ``other``
    parameter and returned ``super().__eq__(other)`` — ``repr()`` on a
    KeyStr instance would raise TypeError.
    """

    def __hash__(self):
        if HashKey._crasher is not None and HashKey._crasher.error_on_hash:
            raise HashingError
        return super().__hash__()

    def __eq__(self, other):
        if HashKey._crasher is not None and HashKey._crasher.error_on_eq:
            raise EqError
        return super().__eq__(other)

    def __repr__(self):
        if HashKey._crasher is not None and HashKey._crasher.error_on_repr:
            raise ReprError
        return super().__repr__()
+
+
class HashKeyCrasher:
    """Context manager that makes HashKey/KeyStr operations raise.

    While active, the selected operations (hash / eq / repr) on HashKey
    and KeyStr instances raise their corresponding error types.
    """

    def __init__(self, *, error_on_hash=False, error_on_eq=False,
                 error_on_repr=False):
        self.error_on_hash = error_on_hash
        self.error_on_eq = error_on_eq
        self.error_on_repr = error_on_repr

    def __enter__(self):
        if HashKey._crasher is not None:
            raise RuntimeError('cannot nest crashers')
        HashKey._crasher = self
        # Return self so `with HashKeyCrasher(...) as crasher:` works
        # (the original implicitly returned None, breaking the `as` form).
        return self

    def __exit__(self, *exc):
        HashKey._crasher = None
+
+
class HashingError(Exception):
    """Raised by HashKey/KeyStr.__hash__ while error_on_hash is active."""
    pass


class EqError(Exception):
    """Raised by HashKey/KeyStr.__eq__ while error_on_eq is active."""
    pass


class ReprError(Exception):
    """Raised by HashKey/KeyStr.__repr__ while error_on_repr is active."""
    pass
diff --git a/immutables/_version.py b/immutables/_version.py
new file mode 100644
index 0000000..b8cc76f
--- /dev/null
+++ b/immutables/_version.py
@@ -0,0 +1,13 @@
+# This file MUST NOT contain anything but the __version__ assignment.
+#
+# When making a release, change the value of __version__
+# to an appropriate value, and open a pull request against
+# the correct branch (master if making a new feature release).
+# The commit message MUST contain a properly formatted release
+# log, and the commit must be signed.
+#
+# The release automation will: build and test the packages for the
+# supported platforms, publish the packages on PyPI, merge the PR
+# to the target branch, create a Git tag pointing to the commit.
+
+__version__ = '0.18'
diff --git a/immutables/map.py b/immutables/map.py
new file mode 100644
index 0000000..0ad2858
--- /dev/null
+++ b/immutables/map.py
@@ -0,0 +1,855 @@
+import collections.abc
+import itertools
+import reprlib
+import sys
+
+
+__all__ = ('Map',)
+
+
+# Thread-safe counter.
+_mut_id = itertools.count(1).__next__
+
+
+# Python version of _map.c. The topmost comment there explains
+# all datastructures and algorithms.
+# The code here follows C code closely on purpose to make
+# debugging and testing easier.
+
+
def map_hash(o):
    """Mix ``hash(o)`` down to 32 bits (mirrors the C implementation)."""
    full = hash(o)
    if sys.hash_info.width <= 32:
        # Hashes are already 32-bit wide on this build; use as-is.
        return full
    # XOR-fold the high and low 32-bit halves together.
    return (full & 0xffffffff) ^ ((full >> 32) & 0xffffffff)
+
+
def map_mask(hash, shift):
    """Extract the 5-bit chunk of *hash* that indexes this tree level."""
    return (hash >> shift) & 0x01f
+
+
def map_bitpos(hash, shift):
    """Bitmap bit corresponding to *hash* at this tree level."""
    return 1 << map_mask(hash, shift)
+
+
def map_bitcount(v):
    """Population count of the 32-bit bitmap *v* (parallel bit-sum)."""
    n = v - ((v >> 1) & 0x55555555)                   # 2-bit partial sums
    n = (n & 0x33333333) + ((n >> 2) & 0x33333333)    # 4-bit partial sums
    n = (n & 0x0F0F0F0F) + ((n >> 4) & 0x0F0F0F0F)    # 8-bit partial sums
    n += n >> 8                                       # 16-bit partial sums
    return (n + (n >> 16)) & 0x3F                     # total (max 32)
+
+
def map_bitindex(bitmap, bit):
    """Packed-array index for *bit*: number of occupied slots below it."""
    return map_bitcount(bitmap & (bit - 1))
+
+
# Result codes returned by the nodes' without() methods.
W_EMPTY, W_NEWNODE, W_NOT_FOUND = range(3)
# Generic "no value" sentinel.
void = object()


class _Unhashable:
    # Deliberately unhashable marker type; see _NULL below.
    __slots__ = ()
    __hash__ = None


# Stored in a node's key slot when the value slot holds a sub-node.
_NULL = _Unhashable()
del _Unhashable
+
+
class BitmapNode:
    """HAMT interior node: a bitmap of occupied slots plus a packed array.

    ``array[2*i]`` holds a key — or ``_NULL`` when ``array[2*i + 1]``
    holds a sub-node instead of a value.  ``mutid`` tags nodes created
    within a mutation so they may be edited in place.
    """

    def __init__(self, size, bitmap, array, mutid):
        self.size = size
        self.bitmap = bitmap
        assert isinstance(array, list) and len(array) == size
        self.array = array
        self.mutid = mutid

    def clone(self, mutid):
        """Shallow-copy this node, tagging the copy with *mutid*."""
        return BitmapNode(self.size, self.bitmap, self.array.copy(), mutid)

    def assoc(self, shift, hash, key, val, mutid):
        """Set key -> val; return ``(node, added)``.

        Mutates in place only when *mutid* is non-zero and matches this
        node's mutid (i.e. the node was created by the same mutation);
        otherwise a modified clone is returned.
        """
        bit = map_bitpos(hash, shift)
        idx = map_bitindex(self.bitmap, bit)

        if self.bitmap & bit:
            # Slot occupied: either a (key, val) pair or (_NULL, sub-node).
            key_idx = 2 * idx
            val_idx = key_idx + 1

            key_or_null = self.array[key_idx]
            val_or_node = self.array[val_idx]

            if key_or_null is _NULL:
                # Delegate to the sub-node, consuming 5 more hash bits.
                sub_node, added = val_or_node.assoc(
                    shift + 5, hash, key, val, mutid)
                if val_or_node is sub_node:
                    return self, added

                if mutid and mutid == self.mutid:
                    self.array[val_idx] = sub_node
                    return self, added
                else:
                    ret = self.clone(mutid)
                    ret.array[val_idx] = sub_node
                    return ret, added

            if key == key_or_null:
                # Same key: replace the value (no-op if identical object).
                if val is val_or_node:
                    return self, False

                if mutid and mutid == self.mutid:
                    self.array[val_idx] = val
                    return self, False
                else:
                    ret = self.clone(mutid)
                    ret.array[val_idx] = val
                    return ret, False

            # Different key in the same slot: push both entries one
            # level down (or into a CollisionNode on a full hash clash).
            existing_key_hash = map_hash(key_or_null)
            if existing_key_hash == hash:
                sub_node = CollisionNode(
                    4, hash, [key_or_null, val_or_node, key, val], mutid)
            else:
                sub_node = BitmapNode(0, 0, [], mutid)
                sub_node, _ = sub_node.assoc(
                    shift + 5, existing_key_hash,
                    key_or_null, val_or_node,
                    mutid)
                sub_node, _ = sub_node.assoc(
                    shift + 5, hash, key, val,
                    mutid)

            if mutid and mutid == self.mutid:
                self.array[key_idx] = _NULL
                self.array[val_idx] = sub_node
                return self, True
            else:
                ret = self.clone(mutid)
                ret.array[key_idx] = _NULL
                ret.array[val_idx] = sub_node
                return ret, True

        else:
            # Empty slot: splice the new pair into the packed array.
            key_idx = 2 * idx
            val_idx = key_idx + 1

            n = map_bitcount(self.bitmap)

            new_array = self.array[:key_idx]
            new_array.append(key)
            new_array.append(val)
            new_array.extend(self.array[key_idx:])

            if mutid and mutid == self.mutid:
                self.size = 2 * (n + 1)
                self.bitmap |= bit
                self.array = new_array
                return self, True
            else:
                return BitmapNode(
                    2 * (n + 1), self.bitmap | bit, new_array, mutid), True

    def find(self, shift, hash, key):
        """Return the value for *key*; raise KeyError if absent."""
        bit = map_bitpos(hash, shift)

        if not (self.bitmap & bit):
            raise KeyError

        idx = map_bitindex(self.bitmap, bit)
        key_idx = idx * 2
        val_idx = key_idx + 1

        key_or_null = self.array[key_idx]
        val_or_node = self.array[val_idx]

        if key_or_null is _NULL:
            return val_or_node.find(shift + 5, hash, key)

        if key == key_or_null:
            return val_or_node

        raise KeyError(key)

    def without(self, shift, hash, key, mutid):
        """Remove *key*; return ``(W_* result code, new node or None)``."""
        bit = map_bitpos(hash, shift)
        if not (self.bitmap & bit):
            return W_NOT_FOUND, None

        idx = map_bitindex(self.bitmap, bit)
        key_idx = 2 * idx
        val_idx = key_idx + 1

        key_or_null = self.array[key_idx]
        val_or_node = self.array[val_idx]

        if key_or_null is _NULL:
            res, sub_node = val_or_node.without(shift + 5, hash, key, mutid)

            if res is W_EMPTY:
                raise RuntimeError('unreachable code')  # pragma: no cover

            elif res is W_NEWNODE:
                if (type(sub_node) is BitmapNode and
                        sub_node.size == 2 and
                        sub_node.array[0] is not _NULL):
                    # Single plain entry left below: collapse it into
                    # this node's slot.
                    if mutid and mutid == self.mutid:
                        self.array[key_idx] = sub_node.array[0]
                        self.array[val_idx] = sub_node.array[1]
                        return W_NEWNODE, self
                    else:
                        clone = self.clone(mutid)
                        clone.array[key_idx] = sub_node.array[0]
                        clone.array[val_idx] = sub_node.array[1]
                        return W_NEWNODE, clone

                if mutid and mutid == self.mutid:
                    self.array[val_idx] = sub_node
                    return W_NEWNODE, self
                else:
                    clone = self.clone(mutid)
                    clone.array[val_idx] = sub_node
                    return W_NEWNODE, clone

            else:
                assert sub_node is None
                return res, None

        else:
            if key == key_or_null:
                if self.size == 2:
                    # Last entry removed: tell the parent to drop us.
                    return W_EMPTY, None

                new_array = self.array[:key_idx]
                new_array.extend(self.array[val_idx + 1:])

                if mutid and mutid == self.mutid:
                    self.size -= 2
                    self.bitmap &= ~bit
                    self.array = new_array
                    return W_NEWNODE, self
                else:
                    new_node = BitmapNode(
                        self.size - 2, self.bitmap & ~bit, new_array, mutid)
                    return W_NEWNODE, new_node

            else:
                return W_NOT_FOUND, None

    def keys(self):
        """Yield every key in this subtree (depth-first)."""
        for i in range(0, self.size, 2):
            key_or_null = self.array[i]

            if key_or_null is _NULL:
                val_or_node = self.array[i + 1]
                yield from val_or_node.keys()
            else:
                yield key_or_null

    def values(self):
        """Yield every value in this subtree (depth-first)."""
        for i in range(0, self.size, 2):
            key_or_null = self.array[i]
            val_or_node = self.array[i + 1]

            if key_or_null is _NULL:
                yield from val_or_node.values()
            else:
                yield val_or_node

    def items(self):
        """Yield every (key, value) pair in this subtree (depth-first)."""
        for i in range(0, self.size, 2):
            key_or_null = self.array[i]
            val_or_node = self.array[i + 1]

            if key_or_null is _NULL:
                yield from val_or_node.items()
            else:
                yield key_or_null, val_or_node

    def dump(self, buf, level):  # pragma: no cover
        """Append a debug rendering of this subtree to *buf*."""
        buf.append(
            '    ' * (level + 1) +
            'BitmapNode(size={} count={} bitmap={} id={:0x}):'.format(
                self.size, self.size / 2, bin(self.bitmap), id(self)))

        for i in range(0, self.size, 2):
            key_or_null = self.array[i]
            val_or_node = self.array[i + 1]

            pad = '    ' * (level + 2)

            if key_or_null is _NULL:
                buf.append(pad + 'NULL:')
                val_or_node.dump(buf, level + 2)
            else:
                buf.append(pad + '{!r}: {!r}'.format(key_or_null, val_or_node))
+
+
class CollisionNode:
    """Leaf node for entries whose keys share the same 32-bit hash."""

    def __init__(self, size, hash, array, mutid):
        self.size = size
        self.hash = hash
        self.array = array
        self.mutid = mutid

    def find_index(self, key):
        """Array index of *key*, or -1 if not present."""
        for i in range(0, self.size, 2):
            if self.array[i] == key:
                return i
        return -1

    def find(self, shift, hash, key):
        """Return the value for *key*; raise KeyError if absent."""
        for i in range(0, self.size, 2):
            if self.array[i] == key:
                return self.array[i + 1]
        raise KeyError(key)

    def assoc(self, shift, hash, key, val, mutid):
        """Set key -> val; return ``(node, added)``."""
        if hash == self.hash:
            key_idx = self.find_index(key)

            if key_idx == -1:
                # New colliding key: append the pair.
                new_array = self.array.copy()
                new_array.append(key)
                new_array.append(val)

                if mutid and mutid == self.mutid:
                    self.size += 2
                    self.array = new_array
                    return self, True
                else:
                    new_node = CollisionNode(
                        self.size + 2, hash, new_array, mutid)
                    return new_node, True

            val_idx = key_idx + 1
            if self.array[val_idx] is val:
                return self, False

            if mutid and mutid == self.mutid:
                self.array[val_idx] = val
                return self, False
            else:
                new_array = self.array.copy()
                new_array[val_idx] = val
                return CollisionNode(self.size, hash, new_array, mutid), False

        else:
            # Different hash: interpose a BitmapNode above this node
            # and retry the insertion there.
            new_node = BitmapNode(
                2, map_bitpos(self.hash, shift), [_NULL, self], mutid)
            return new_node.assoc(shift, hash, key, val, mutid)

    def without(self, shift, hash, key, mutid):
        """Remove *key*; return ``(W_* result code, new node or None)``."""
        if hash != self.hash:
            return W_NOT_FOUND, None

        key_idx = self.find_index(key)
        if key_idx == -1:
            return W_NOT_FOUND, None

        new_size = self.size - 2
        if new_size == 0:
            # Shouldn't be ever reachable
            return W_EMPTY, None  # pragma: no cover

        if new_size == 2:
            # One entry left: degrade back to a plain BitmapNode pair.
            if key_idx == 0:
                new_array = [self.array[2], self.array[3]]
            else:
                assert key_idx == 2
                new_array = [self.array[0], self.array[1]]

            new_node = BitmapNode(
                2, map_bitpos(hash, shift), new_array, mutid)
            return W_NEWNODE, new_node

        new_array = self.array[:key_idx]
        new_array.extend(self.array[key_idx + 2:])
        if mutid and mutid == self.mutid:
            self.array = new_array
            self.size -= 2
            return W_NEWNODE, self
        else:
            new_node = CollisionNode(
                self.size - 2, self.hash, new_array, mutid)
            return W_NEWNODE, new_node

    def keys(self):
        """Yield every colliding key."""
        for i in range(0, self.size, 2):
            yield self.array[i]

    def values(self):
        """Yield every value."""
        for i in range(1, self.size, 2):
            yield self.array[i]

    def items(self):
        """Yield every (key, value) pair."""
        for i in range(0, self.size, 2):
            yield self.array[i], self.array[i + 1]

    def dump(self, buf, level):  # pragma: no cover
        """Append a debug rendering of this node to *buf*."""
        pad = '    ' * (level + 1)
        buf.append(
            pad + 'CollisionNode(size={} id={:0x}):'.format(
                self.size, id(self)))

        pad = '    ' * (level + 2)
        for i in range(0, self.size, 2):
            key = self.array[i]
            val = self.array[i + 1]

            buf.append('{}{!r}: {!r}'.format(pad, key, val))
+
+
class MapKeys:
    """Sized, iterable keys view over a Map's root node."""

    def __init__(self, c, m):
        self.__count = c
        self.__root = m

    def __len__(self):
        return self.__count

    def __iter__(self):
        return iter(self.__root.keys())
+
+
class MapValues:
    """Sized, iterable values view over a Map's root node."""

    def __init__(self, c, m):
        self.__count = c
        self.__root = m

    def __len__(self):
        return self.__count

    def __iter__(self):
        return iter(self.__root.values())
+
+
class MapItems:
    """Sized, iterable items view over a Map's root node."""

    def __init__(self, c, m):
        self.__count = c
        self.__root = m

    def __len__(self):
        return self.__count

    def __iter__(self):
        return iter(self.__root.items())
+
+
+class Map:
+
    def __init__(self, *args, **kw):
        """Create a Map from an optional collection plus keyword items."""
        if not args:
            col = None
        elif len(args) == 1:
            col = args[0]
        else:
            raise TypeError(
                "immutables.Map expected at most 1 arguments, "
                "got {}".format(len(args))
            )

        # Start from an empty tree; mutid 0 means "not inside a mutation".
        self.__count = 0
        self.__root = BitmapNode(0, 0, [], 0)
        self.__hash = -1

        if isinstance(col, Map):
            # Share the other Map's (immutable) tree; no copying needed.
            self.__count = col.__count
            self.__root = col.__root
            self.__hash = col.__hash
            col = None
        elif isinstance(col, MapMutation):
            raise TypeError('cannot create Maps from MapMutations')

        if col or kw:
            init = self.update(col, **kw)
            self.__count = init.__count
            self.__root = init.__root
+
    @classmethod
    def _new(cls, count, root):
        """Internal constructor: wrap an existing tree without copying."""
        m = Map.__new__(Map)
        m.__count = count
        m.__root = root
        m.__hash = -1
        return m
+
+ def __reduce__(self):
+ return (type(self), (dict(self.items()),))
+
+ def __len__(self):
+ return self.__count
+
+ def __eq__(self, other):
+ if not isinstance(other, Map):
+ return NotImplemented
+
+ if len(self) != len(other):
+ return False
+
+ for key, val in self.__root.items():
+ try:
+ oval = other.__root.find(0, map_hash(key), key)
+ except KeyError:
+ return False
+ else:
+ if oval != val:
+ return False
+
+ return True
+
+ def update(self, *args, **kw):
+ if not args:
+ col = None
+ elif len(args) == 1:
+ col = args[0]
+ else:
+ raise TypeError(
+ "update expected at most 1 arguments, got {}".format(len(args))
+ )
+
+ it = None
+
+ if col is not None:
+ if hasattr(col, 'items'):
+ it = iter(col.items())
+ else:
+ it = iter(col)
+
+ if it is not None:
+ if kw:
+ it = iter(itertools.chain(it, kw.items()))
+ else:
+ if kw:
+ it = iter(kw.items())
+
+ if it is None:
+
+ return self
+
+ mutid = _mut_id()
+ root = self.__root
+ count = self.__count
+
+ i = 0
+ while True:
+ try:
+ tup = next(it)
+ except StopIteration:
+ break
+
+ try:
+ tup = tuple(tup)
+ except TypeError:
+ raise TypeError(
+ 'cannot convert map update '
+ 'sequence element #{} to a sequence'.format(i)) from None
+ key, val, *r = tup
+ if r:
+ raise ValueError(
+ 'map update sequence element #{} has length '
+ '{}; 2 is required'.format(i, len(r) + 2))
+
+ root, added = root.assoc(0, map_hash(key), key, val, mutid)
+ if added:
+ count += 1
+
+ i += 1
+
+ return Map._new(count, root)
+
+ def mutate(self):
+ return MapMutation(self.__count, self.__root)
+
+ def set(self, key, val):
+ new_count = self.__count
+ new_root, added = self.__root.assoc(0, map_hash(key), key, val, 0)
+
+ if new_root is self.__root:
+ assert not added
+ return self
+
+ if added:
+ new_count += 1
+
+ return Map._new(new_count, new_root)
+
+ def delete(self, key):
+ res, node = self.__root.without(0, map_hash(key), key, 0)
+ if res is W_EMPTY:
+ return Map()
+ elif res is W_NOT_FOUND:
+ raise KeyError(key)
+ else:
+ return Map._new(self.__count - 1, node)
+
+ def get(self, key, default=None):
+ try:
+ return self.__root.find(0, map_hash(key), key)
+ except KeyError:
+ return default
+
+ def __getitem__(self, key):
+ return self.__root.find(0, map_hash(key), key)
+
+ def __contains__(self, key):
+ try:
+ self.__root.find(0, map_hash(key), key)
+ except KeyError:
+ return False
+ else:
+ return True
+
+ def __iter__(self):
+ yield from self.__root.keys()
+
+ def keys(self):
+ return MapKeys(self.__count, self.__root)
+
+ def values(self):
+ return MapValues(self.__count, self.__root)
+
+ def items(self):
+ return MapItems(self.__count, self.__root)
+
+ def __hash__(self):
+ if self.__hash != -1:
+ return self.__hash
+
+ MAX = sys.maxsize
+ MASK = 2 * MAX + 1
+
+ h = 1927868237 * (self.__count * 2 + 1)
+ h &= MASK
+
+ for key, value in self.__root.items():
+ hx = hash(key)
+ h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167
+ h &= MASK
+
+ hx = hash(value)
+ h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167
+ h &= MASK
+
+ h = h * 69069 + 907133923
+ h &= MASK
+
+ if h > MAX:
+ h -= MASK + 1 # pragma: no cover
+ if h == -1:
+ h = 590923713 # pragma: no cover
+
+ self.__hash = h
+ return h
+
+ @reprlib.recursive_repr("{...}")
+ def __repr__(self):
+ items = []
+ for key, val in self.items():
+ items.append("{!r}: {!r}".format(key, val))
+ return 'immutables.Map({{{}}})'.format(', '.join(items))
+
+ def __dump__(self): # pragma: no cover
+ buf = []
+ self.__root.dump(buf, 0)
+ return '\n'.join(buf)
+
+ def __class_getitem__(cls, item):
+ return cls
+
+
class MapMutation:
    """A mutable builder used to batch changes to a Map.

    Obtained via ``Map.mutate()``.  Supports item assignment/deletion,
    ``update()`` and ``pop()``; ``finish()`` freezes it back into an
    immutable Map.  Also usable as a context manager, finishing on
    exit.
    """

    def __init__(self, count, root):
        self.__count = count
        self.__root = root
        # Non-zero id marking trie nodes this mutation may edit in
        # place; reset to 0 by finish() to reject further changes.
        self.__mutid = _mut_id()

    def set(self, key, val):
        """Set *key* to *val* (alias for item assignment)."""
        self[key] = val

    def __enter__(self):
        return self

    def __exit__(self, *exc):
        # Always finalize; returning False propagates any exception.
        self.finish()
        return False

    def __iter__(self):
        # Deliberately not iterable while mutable.
        raise TypeError('{} is not iterable'.format(type(self)))

    def __delitem__(self, key):
        """Remove *key* in place; raises KeyError if missing."""
        if self.__mutid == 0:
            raise ValueError('mutation {!r} has been finished'.format(self))

        res, new_root = self.__root.without(
            0, map_hash(key), key, self.__mutid)
        if res is W_EMPTY:
            self.__count = 0
            self.__root = BitmapNode(0, 0, [], self.__mutid)
        elif res is W_NOT_FOUND:
            raise KeyError(key)
        else:
            self.__root = new_root
            self.__count -= 1

    def __setitem__(self, key, val):
        if self.__mutid == 0:
            raise ValueError('mutation {!r} has been finished'.format(self))

        self.__root, added = self.__root.assoc(
            0, map_hash(key), key, val, self.__mutid)

        if added:
            self.__count += 1

    def pop(self, key, *args):
        """Remove *key* and return its value.

        With one extra positional argument, return it as a default when
        *key* is absent; otherwise raise KeyError.
        """
        if self.__mutid == 0:
            raise ValueError('mutation {!r} has been finished'.format(self))

        if len(args) > 1:
            raise TypeError(
                'pop() accepts 1 to 2 positional arguments, '
                'got {}'.format(len(args) + 1))
        elif len(args) == 1:
            default = args[0]
        else:
            # `void` sentinel distinguishes "no default given" from a
            # caller-supplied default of None.
            default = void

        val = self.get(key, default)

        try:
            del self[key]
        except KeyError:
            if val is void:
                raise
            return val
        else:
            assert val is not void
            return val

    def get(self, key, default=None):
        """Return the value for *key*, or *default* if absent."""
        try:
            return self.__root.find(0, map_hash(key), key)
        except KeyError:
            return default

    def __getitem__(self, key):
        return self.__root.find(0, map_hash(key), key)

    def __contains__(self, key):
        try:
            self.__root.find(0, map_hash(key), key)
        except KeyError:
            return False
        else:
            return True

    def update(self, *args, **kw):
        """Merge items from *args[0]* and ``kw`` into this mutation.

        Accepts the same argument shapes as ``Map.update``; mutates in
        place and returns None.
        """
        if not args:
            col = None
        elif len(args) == 1:
            col = args[0]
        else:
            raise TypeError(
                "update expected at most 1 arguments, got {}".format(len(args))
            )

        if self.__mutid == 0:
            raise ValueError('mutation {!r} has been finished'.format(self))

        it = None
        if col is not None:
            if hasattr(col, 'items'):
                it = iter(col.items())
            else:
                it = iter(col)

        if it is not None:
            if kw:
                it = iter(itertools.chain(it, kw.items()))
        else:
            if kw:
                it = iter(kw.items())

        if it is None:
            return

        root = self.__root
        count = self.__count

        i = 0
        while True:
            try:
                tup = next(it)
            except StopIteration:
                break

            try:
                tup = tuple(tup)
            except TypeError:
                raise TypeError(
                    'cannot convert map update '
                    'sequence element #{} to a sequence'.format(i)) from None
            key, val, *r = tup
            if r:
                raise ValueError(
                    'map update sequence element #{} has length '
                    '{}; 2 is required'.format(i, len(r) + 2))

            root, added = root.assoc(0, map_hash(key), key, val, self.__mutid)
            if added:
                count += 1

            i += 1

        self.__root = root
        self.__count = count

    def finish(self):
        """Invalidate this mutation and return the resulting Map."""
        self.__mutid = 0
        return Map._new(self.__count, self.__root)

    @reprlib.recursive_repr("{...}")
    def __repr__(self):
        items = []
        for key, val in self.__root.items():
            items.append("{!r}: {!r}".format(key, val))
        return 'immutables.MapMutation({{{}}})'.format(', '.join(items))

    def __len__(self):
        return self.__count

    def __reduce__(self):
        # Transient builders must not be pickled.
        raise TypeError("can't pickle {} objects".format(type(self).__name__))

    def __hash__(self):
        raise TypeError('unhashable type: {}'.format(type(self).__name__))

    def __eq__(self, other):
        if not isinstance(other, MapMutation):
            return NotImplemented

        if len(self) != len(other):
            return False

        for key, val in self.__root.items():
            try:
                oval = other.__root.find(0, map_hash(key), key)
            except KeyError:
                return False
            else:
                if oval != val:
                    return False

        return True
+
+
+collections.abc.Mapping.register(Map)
diff --git a/immutables/py.typed b/immutables/py.typed
new file mode 100644
index 0000000..b648ac9
--- /dev/null
+++ b/immutables/py.typed
@@ -0,0 +1 @@
+partial
diff --git a/immutables/pythoncapi_compat.h b/immutables/pythoncapi_compat.h
new file mode 100644
index 0000000..2837467
--- /dev/null
+++ b/immutables/pythoncapi_compat.h
@@ -0,0 +1,446 @@
+// Header file providing new C API functions to old Python versions.
+//
+// File distributed under the Zero Clause BSD (0BSD) license.
+// Copyright Contributors to the pythoncapi_compat project.
+//
+// Homepage:
+// https://github.com/python/pythoncapi_compat
+//
+// Latest version:
+// https://raw.githubusercontent.com/python/pythoncapi_compat/master/pythoncapi_compat.h
+//
+// SPDX-License-Identifier: 0BSD
+
+#ifndef PYTHONCAPI_COMPAT
+#define PYTHONCAPI_COMPAT
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <Python.h>
+#include "frameobject.h" // PyFrameObject, PyFrame_GetBack()
+
+
+// Compatibility with Visual Studio 2013 and older which don't support
+// the inline keyword in C (only in C++): use __inline instead.
/* Compatibility with Visual Studio 2013 and older, which don't support
 * the `inline` keyword in C (only in C++): use __inline instead.
 * Bug fix: the MSVC branch previously defined a misspelled macro
 * (`PYCAPI_COMPAT_INLINE`) and dropped the closing parenthesis of the
 * parameter list, so every use of PYCAPI_COMPAT_STATIC_INLINE() failed
 * to compile on MSVC < 1900. */
#if (defined(_MSC_VER) && _MSC_VER < 1900 \
        && !defined(__cplusplus) && !defined(inline))
#  define PYCAPI_COMPAT_STATIC_INLINE(TYPE) static __inline TYPE
#else
#  define PYCAPI_COMPAT_STATIC_INLINE(TYPE) static inline TYPE
#endif
+
+
+// C++ compatibility
+#ifdef __cplusplus
+# define PYCAPI_COMPAT_CAST(TYPE, EXPR) reinterpret_cast<TYPE>(EXPR)
+# define PYCAPI_COMPAT_NULL nullptr
+#else
+# define PYCAPI_COMPAT_CAST(TYPE, EXPR) ((TYPE)(EXPR))
+# define PYCAPI_COMPAT_NULL NULL
+#endif
+
+// Cast argument to PyObject* type.
+#ifndef _PyObject_CAST
+# define _PyObject_CAST(op) PYCAPI_COMPAT_CAST(PyObject*, op)
+#endif
+#ifndef _PyObject_CAST_CONST
+# define _PyObject_CAST_CONST(op) PYCAPI_COMPAT_CAST(const PyObject*, op)
+#endif
+
+
+// bpo-42262 added Py_NewRef() to Python 3.10.0a3
+#if PY_VERSION_HEX < 0x030A00A3 && !defined(Py_NewRef)
+PYCAPI_COMPAT_STATIC_INLINE(PyObject*)
+_Py_NewRef(PyObject *obj)
+{
+ Py_INCREF(obj);
+ return obj;
+}
+#define Py_NewRef(obj) _Py_NewRef(_PyObject_CAST(obj))
+#endif
+
+
+// bpo-42262 added Py_XNewRef() to Python 3.10.0a3
+#if PY_VERSION_HEX < 0x030A00A3 && !defined(Py_XNewRef)
+PYCAPI_COMPAT_STATIC_INLINE(PyObject*)
+_Py_XNewRef(PyObject *obj)
+{
+ Py_XINCREF(obj);
+ return obj;
+}
+#define Py_XNewRef(obj) _Py_XNewRef(_PyObject_CAST(obj))
+#endif
+
+
+// See https://bugs.python.org/issue42522
+#if !defined(_Py_StealRef)
+PYCAPI_COMPAT_STATIC_INLINE(PyObject*)
+__Py_StealRef(PyObject *obj)
+{
+ Py_DECREF(obj);
+ return obj;
+}
+#define _Py_StealRef(obj) __Py_StealRef(_PyObject_CAST(obj))
+#endif
+
+
+// See https://bugs.python.org/issue42522
+#if !defined(_Py_XStealRef)
+PYCAPI_COMPAT_STATIC_INLINE(PyObject*)
+__Py_XStealRef(PyObject *obj)
+{
+ Py_XDECREF(obj);
+ return obj;
+}
+#define _Py_XStealRef(obj) __Py_XStealRef(_PyObject_CAST(obj))
+#endif
+
+
+// bpo-39573 added Py_SET_REFCNT() to Python 3.9.0a4
+#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_REFCNT)
+PYCAPI_COMPAT_STATIC_INLINE(void)
+_Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt)
+{
+ ob->ob_refcnt = refcnt;
+}
+#define Py_SET_REFCNT(ob, refcnt) _Py_SET_REFCNT(_PyObject_CAST(ob), refcnt)
+#endif
+
+
+// Py_SETREF() and Py_XSETREF() were added to Python 3.5.2.
+// It is excluded from the limited C API.
+#if (PY_VERSION_HEX < 0x03050200 && !defined(Py_SETREF)) && !defined(Py_LIMITED_API)
+#define Py_SETREF(op, op2) \
+ do { \
+ PyObject *_py_tmp = _PyObject_CAST(op); \
+ (op) = (op2); \
+ Py_DECREF(_py_tmp); \
+ } while (0)
+
+#define Py_XSETREF(op, op2) \
+ do { \
+ PyObject *_py_tmp = _PyObject_CAST(op); \
+ (op) = (op2); \
+ Py_XDECREF(_py_tmp); \
+ } while (0)
+#endif
+
+
+// bpo-43753 added Py_Is(), Py_IsNone(), Py_IsTrue() and Py_IsFalse()
+// to Python 3.10.0b1.
+#if PY_VERSION_HEX < 0x030A00B1 && !defined(Py_Is)
+# define Py_Is(x, y) ((x) == (y))
+#endif
+#if PY_VERSION_HEX < 0x030A00B1 && !defined(Py_IsNone)
+# define Py_IsNone(x) Py_Is(x, Py_None)
+#endif
+#if PY_VERSION_HEX < 0x030A00B1 && !defined(Py_IsTrue)
+# define Py_IsTrue(x) Py_Is(x, Py_True)
+#endif
+#if PY_VERSION_HEX < 0x030A00B1 && !defined(Py_IsFalse)
+# define Py_IsFalse(x) Py_Is(x, Py_False)
+#endif
+
+
+// bpo-39573 added Py_SET_TYPE() to Python 3.9.0a4
+#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_TYPE)
+PYCAPI_COMPAT_STATIC_INLINE(void)
+_Py_SET_TYPE(PyObject *ob, PyTypeObject *type)
+{
+ ob->ob_type = type;
+}
+#define Py_SET_TYPE(ob, type) _Py_SET_TYPE(_PyObject_CAST(ob), type)
+#endif
+
+
+// bpo-39573 added Py_SET_SIZE() to Python 3.9.0a4
+#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_SIZE)
+PYCAPI_COMPAT_STATIC_INLINE(void)
+_Py_SET_SIZE(PyVarObject *ob, Py_ssize_t size)
+{
+ ob->ob_size = size;
+}
+#define Py_SET_SIZE(ob, size) _Py_SET_SIZE((PyVarObject*)(ob), size)
+#endif
+
+
+// bpo-40421 added PyFrame_GetCode() to Python 3.9.0b1
+#if PY_VERSION_HEX < 0x030900B1
+PYCAPI_COMPAT_STATIC_INLINE(PyCodeObject*)
+PyFrame_GetCode(PyFrameObject *frame)
+{
+ assert(frame != PYCAPI_COMPAT_NULL);
+ assert(frame->f_code != PYCAPI_COMPAT_NULL);
+ return PYCAPI_COMPAT_CAST(PyCodeObject*, Py_NewRef(frame->f_code));
+}
+#endif
+
+PYCAPI_COMPAT_STATIC_INLINE(PyCodeObject*)
+_PyFrame_GetCodeBorrow(PyFrameObject *frame)
+{
+ return PYCAPI_COMPAT_CAST(PyCodeObject *,
+ _Py_StealRef(PyFrame_GetCode(frame)));
+}
+
+
+// bpo-40421 added PyFrame_GetCode() to Python 3.9.0b1
+#if PY_VERSION_HEX < 0x030900B1 && !defined(PYPY_VERSION)
+PYCAPI_COMPAT_STATIC_INLINE(PyFrameObject*)
+PyFrame_GetBack(PyFrameObject *frame)
+{
+ assert(frame != PYCAPI_COMPAT_NULL);
+ return PYCAPI_COMPAT_CAST(PyFrameObject*, Py_XNewRef(frame->f_back));
+}
+#endif
+
+#if !defined(PYPY_VERSION)
+PYCAPI_COMPAT_STATIC_INLINE(PyFrameObject*)
+_PyFrame_GetBackBorrow(PyFrameObject *frame)
+{
+ return PYCAPI_COMPAT_CAST(PyFrameObject *,
+ _Py_XStealRef(PyFrame_GetBack(frame)));
+}
+#endif
+
+
+// bpo-39947 added PyThreadState_GetInterpreter() to Python 3.9.0a5
+#if PY_VERSION_HEX < 0x030900A5
+PYCAPI_COMPAT_STATIC_INLINE(PyInterpreterState *)
+PyThreadState_GetInterpreter(PyThreadState *tstate)
+{
+ assert(tstate != PYCAPI_COMPAT_NULL);
+ return tstate->interp;
+}
+#endif
+
+
+// bpo-40429 added PyThreadState_GetFrame() to Python 3.9.0b1
+#if PY_VERSION_HEX < 0x030900B1 && !defined(PYPY_VERSION)
+PYCAPI_COMPAT_STATIC_INLINE(PyFrameObject*)
+PyThreadState_GetFrame(PyThreadState *tstate)
+{
+ assert(tstate != PYCAPI_COMPAT_NULL);
+ return PYCAPI_COMPAT_CAST(PyFrameObject *, Py_XNewRef(tstate->frame));
+}
+#endif
+
+#if !defined(PYPY_VERSION)
+PYCAPI_COMPAT_STATIC_INLINE(PyFrameObject*)
+_PyThreadState_GetFrameBorrow(PyThreadState *tstate)
+{
+ return PYCAPI_COMPAT_CAST(PyFrameObject*,
+ _Py_XStealRef(PyThreadState_GetFrame(tstate)));
+}
+#endif
+
+
+// bpo-39947 added PyInterpreterState_Get() to Python 3.9.0a5
+#if PY_VERSION_HEX < 0x030900A5
+PYCAPI_COMPAT_STATIC_INLINE(PyInterpreterState*)
+PyInterpreterState_Get(void)
+{
+ PyThreadState *tstate;
+ PyInterpreterState *interp;
+
+ tstate = PyThreadState_GET();
+ if (tstate == PYCAPI_COMPAT_NULL) {
+ Py_FatalError("GIL released (tstate is NULL)");
+ }
+ interp = tstate->interp;
+ if (interp == PYCAPI_COMPAT_NULL) {
+ Py_FatalError("no current interpreter");
+ }
+ return interp;
+}
+#endif
+
+
+// bpo-39947 added PyInterpreterState_Get() to Python 3.9.0a6
+#if 0x030700A1 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x030900A6 && !defined(PYPY_VERSION)
+PYCAPI_COMPAT_STATIC_INLINE(uint64_t)
+PyThreadState_GetID(PyThreadState *tstate)
+{
+ assert(tstate != PYCAPI_COMPAT_NULL);
+ return tstate->id;
+}
+#endif
+
+// bpo-43760 added PyThreadState_EnterTracing() to Python 3.11.0a2
+#if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION)
+PYCAPI_COMPAT_STATIC_INLINE(void)
+PyThreadState_EnterTracing(PyThreadState *tstate)
+{
+ tstate->tracing++;
+#if PY_VERSION_HEX >= 0x030A00A1
+ tstate->cframe->use_tracing = 0;
+#else
+ tstate->use_tracing = 0;
+#endif
+}
+#endif
+
+// bpo-43760 added PyThreadState_LeaveTracing() to Python 3.11.0a2
+#if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION)
+PYCAPI_COMPAT_STATIC_INLINE(void)
+PyThreadState_LeaveTracing(PyThreadState *tstate)
+{
+ int use_tracing = (tstate->c_tracefunc != PYCAPI_COMPAT_NULL
+ || tstate->c_profilefunc != PYCAPI_COMPAT_NULL);
+ tstate->tracing--;
+#if PY_VERSION_HEX >= 0x030A00A1
+ tstate->cframe->use_tracing = use_tracing;
+#else
+ tstate->use_tracing = use_tracing;
+#endif
+}
+#endif
+
+
+// bpo-37194 added PyObject_CallNoArgs() to Python 3.9.0a1
+#if PY_VERSION_HEX < 0x030900A1
+PYCAPI_COMPAT_STATIC_INLINE(PyObject*)
+PyObject_CallNoArgs(PyObject *func)
+{
+ return PyObject_CallFunctionObjArgs(func, NULL);
+}
+#endif
+
+
+// bpo-39245 made PyObject_CallOneArg() public (previously called
+// _PyObject_CallOneArg) in Python 3.9.0a4
+#if PY_VERSION_HEX < 0x030900A4
+PYCAPI_COMPAT_STATIC_INLINE(PyObject*)
+PyObject_CallOneArg(PyObject *func, PyObject *arg)
+{
+ return PyObject_CallFunctionObjArgs(func, arg, NULL);
+}
+#endif
+
+
+// bpo-1635741 added PyModule_AddObjectRef() to Python 3.10.0a3
+#if PY_VERSION_HEX < 0x030A00A3
+PYCAPI_COMPAT_STATIC_INLINE(int)
+PyModule_AddObjectRef(PyObject *mod, const char *name, PyObject *value)
+{
+ int res;
+ Py_XINCREF(value);
+ res = PyModule_AddObject(mod, name, value);
+ if (res < 0) {
+ Py_XDECREF(value);
+ }
+ return res;
+}
+#endif
+
+
+// bpo-40024 added PyModule_AddType() to Python 3.9.0a5
+#if PY_VERSION_HEX < 0x030900A5
+PYCAPI_COMPAT_STATIC_INLINE(int)
+PyModule_AddType(PyObject *mod, PyTypeObject *type)
+{
+ const char *name, *dot;
+
+ if (PyType_Ready(type) < 0) {
+ return -1;
+ }
+
+ // inline _PyType_Name()
+ name = type->tp_name;
+ assert(name != PYCAPI_COMPAT_NULL);
+ dot = strrchr(name, '.');
+ if (dot != PYCAPI_COMPAT_NULL) {
+ name = dot + 1;
+ }
+
+ return PyModule_AddObjectRef(mod, name, _PyObject_CAST(type));
+}
+#endif
+
+
+// bpo-40241 added PyObject_GC_IsTracked() to Python 3.9.0a6.
+// bpo-4688 added _PyObject_GC_IS_TRACKED() to Python 2.7.0a2.
+#if PY_VERSION_HEX < 0x030900A6 && !defined(PYPY_VERSION)
+PYCAPI_COMPAT_STATIC_INLINE(int)
+PyObject_GC_IsTracked(PyObject* obj)
+{
+ return (PyObject_IS_GC(obj) && _PyObject_GC_IS_TRACKED(obj));
+}
+#endif
+
+// bpo-40241 added PyObject_GC_IsFinalized() to Python 3.9.0a6.
+// bpo-18112 added _PyGCHead_FINALIZED() to Python 3.4.0 final.
+#if PY_VERSION_HEX < 0x030900A6 && PY_VERSION_HEX >= 0x030400F0 && !defined(PYPY_VERSION)
+PYCAPI_COMPAT_STATIC_INLINE(int)
+PyObject_GC_IsFinalized(PyObject *obj)
+{
+ PyGC_Head *gc = PYCAPI_COMPAT_CAST(PyGC_Head *, obj) - 1;
+ return (PyObject_IS_GC(obj) && _PyGCHead_FINALIZED(gc));
+}
+#endif
+
+
+// bpo-39573 added Py_IS_TYPE() to Python 3.9.0a4
+#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_IS_TYPE)
+PYCAPI_COMPAT_STATIC_INLINE(int)
+_Py_IS_TYPE(const PyObject *ob, const PyTypeObject *type) {
+ return ob->ob_type == type;
+}
+#define Py_IS_TYPE(ob, type) _Py_IS_TYPE(_PyObject_CAST_CONST(ob), type)
+#endif
+
+
+// bpo-46906 added PyFloat_Pack2() and PyFloat_Unpack2() to Python 3.11a7.
+// bpo-11734 added _PyFloat_Pack2() and _PyFloat_Unpack2() to Python 3.6.0b1.
+// Python 3.11a2 moved _PyFloat_Pack2() and _PyFloat_Unpack2() to the internal
+// C API: Python 3.11a2-3.11a6 versions are not supported.
+#if 0x030600B1 <= PY_VERSION_HEX && PY_VERSION_HEX <= 0x030B00A1 && !defined(PYPY_VERSION)
+PYCAPI_COMPAT_STATIC_INLINE(int)
+PyFloat_Pack2(double x, char *p, int le)
+{ return _PyFloat_Pack2(x, (unsigned char*)p, le); }
+
+PYCAPI_COMPAT_STATIC_INLINE(double)
+PyFloat_Unpack2(const char *p, int le)
+{ return _PyFloat_Unpack2((const unsigned char *)p, le); }
+#endif
+
+
+// bpo-46906 added PyFloat_Pack4(), PyFloat_Pack8(), PyFloat_Unpack4() and
+// PyFloat_Unpack8() to Python 3.11a7.
+// Python 3.11a2 moved _PyFloat_Pack4(), _PyFloat_Pack8(), _PyFloat_Unpack4()
+// and _PyFloat_Unpack8() to the internal C API: Python 3.11a2-3.11a6 versions
+// are not supported.
+#if PY_VERSION_HEX <= 0x030B00A1 && !defined(PYPY_VERSION)
+PYCAPI_COMPAT_STATIC_INLINE(int)
+PyFloat_Pack4(double x, char *p, int le)
+{ return _PyFloat_Pack4(x, (unsigned char*)p, le); }
+
+PYCAPI_COMPAT_STATIC_INLINE(int)
+PyFloat_Pack8(double x, char *p, int le)
+{ return _PyFloat_Pack8(x, (unsigned char*)p, le); }
+
+PYCAPI_COMPAT_STATIC_INLINE(double)
+PyFloat_Unpack4(const char *p, int le)
+{ return _PyFloat_Unpack4((const unsigned char *)p, le); }
+
+PYCAPI_COMPAT_STATIC_INLINE(double)
+PyFloat_Unpack8(const char *p, int le)
+{ return _PyFloat_Unpack8((const unsigned char *)p, le); }
+#endif
+
+
+// Py_UNUSED() was added to Python 3.4.0b2.
+#if PY_VERSION_HEX < 0x030400B2 && !defined(Py_UNUSED)
+# if defined(__GNUC__) || defined(__clang__)
+# define Py_UNUSED(name) _unused_ ## name __attribute__((unused))
+# else
+# define Py_UNUSED(name) _unused_ ## name
+# endif
+#endif
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif // PYTHONCAPI_COMPAT
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..e9018e7
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,25 @@
+[project]
+name = 'immutables'
+requires-python = ">=3.6"
+
+[build-system]
+requires = ["setuptools>=42", "wheel"]
+build-backend = "setuptools.build_meta"
+
+[tool.pytest.ini_options]
+minversion = "6.0"
+addopts = "--capture=no --assert=plain --strict-markers --tb=native --import-mode=importlib"
+testpaths = "tests"
+filterwarnings = "default"
+
+[tool.mypy]
+incremental = true
+strict = true
+
+[[tool.mypy.overrides]]
+module = "immutables.map"
+ignore_errors = true
+
+[[tool.mypy.overrides]]
+module = "immutables._testutils"
+ignore_errors = true
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..09b8de4
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,93 @@
"""Build script for immutables.

Builds the C-accelerated ``immutables._map`` extension on CPython and
falls back to the pure-Python implementation elsewhere.
"""
import os
import platform
import setuptools


# Minimal dependencies required to test immutables.
TEST_DEPENDENCIES = [
    # pycodestyle is a dependency of flake8, but it must be frozen because
    # their combination breaks too often
    # (example breakage: https://gitlab.com/pycqa/flake8/issues/427)
    'flake8~=3.8.4',
    'pycodestyle~=2.6.0',
    'mypy==0.942',
    'pytest~=6.2.4',
]

EXTRA_DEPENDENCIES = {
    'test': TEST_DEPENDENCIES,
}

# GCC/Clang-style warning flags are skipped on Windows (MSVC would
# reject them).
CFLAGS = ['-O2']
if platform.uname().system != 'Windows':
    CFLAGS.extend(['-std=c99', '-fsigned-char', '-Wall',
                   '-Wsign-compare', '-Wconversion'])


# Read the version from immutables/_version.py without importing the
# package (importing could require the not-yet-built C extension).
with open(os.path.join(
        os.path.dirname(__file__), 'immutables', '_version.py')) as f:
    for line in f:
        if line.startswith('__version__ ='):
            _, _, version = line.partition('=')
            VERSION = version.strip(" \n'\"")
            break
    else:
        raise RuntimeError(
            'unable to read the version from immutables/_version.py')


if platform.python_implementation() == 'CPython':
    if os.environ.get("DEBUG_IMMUTABLES") == '1':
        # Debug build: keep C assert() enabled.
        define_macros = []
        undef_macros = ['NDEBUG']
    else:
        define_macros = [('NDEBUG', '1')]
        undef_macros = []

    ext_modules = [
        setuptools.Extension(
            "immutables._map",
            ["immutables/_map.c"],
            extra_compile_args=CFLAGS,
            define_macros=define_macros,
            undef_macros=undef_macros)
    ]
else:
    # Non-CPython implementations (e.g. PyPy) use the pure-Python Map.
    ext_modules = []


with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as f:
    readme = f.read()


setuptools.setup(
    name='immutables',
    version=VERSION,
    description='Immutable Collections',
    long_description=readme,
    python_requires='>=3.6',
    classifiers=[
        'License :: OSI Approved :: Apache Software License',
        'Intended Audience :: Developers',
        'Programming Language :: Python :: 3 :: Only',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: 3.10',
        'Operating System :: POSIX',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: Microsoft :: Windows',
    ],
    author='MagicStack Inc',
    author_email='hello@magic.io',
    url='https://github.com/MagicStack/immutables',
    license='Apache License, Version 2.0',
    packages=['immutables'],
    package_data={"immutables": ["py.typed", "*.pyi"]},
    provides=['immutables'],
    include_package_data=True,
    ext_modules=ext_modules,
    install_requires=['typing-extensions>=3.7.4.3;python_version<"3.8"'],
    extras_require=EXTRA_DEPENDENCIES,
)
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..450c573
--- /dev/null
+++ b/tests/__init__.py
@@ -0,0 +1,16 @@
+import os.path
+import sys
+import unittest
+
+
def suite():
    """Build a TestSuite of every ``test_*.py`` module in this directory."""
    return unittest.TestLoader().discover(
        os.path.dirname(__file__), pattern='test_*.py')
+
+
if __name__ == '__main__':
    # Allow running the test package directly; the process exit code
    # reflects whether the discovered suite passed.
    runner = unittest.runner.TextTestRunner()
    result = runner.run(suite())
    sys.exit(not result.wasSuccessful())
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..9eb9e61
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,13 @@
# We need the mypy pytest plugin to do the test collection for our
# typing tests.

# mypy demands that its test-data be present for mypy.test.config to be
# imported, so thwart that check. mypy PR #10919 fixes this.
# NOTE: the patch must wrap the import itself — mypy.test.config runs
# the isdir() check at import time.
import unittest.mock
with unittest.mock.patch('os.path.isdir') as isdir:
    isdir.return_value = True
    import mypy.test.config # noqa

# Register mypy's data-driven test plugin so the .test files under
# tests/test-data are collected by pytest.
pytest_plugins = [
    'mypy.test.data',
]
diff --git a/tests/test-data/check-immu.test b/tests/test-data/check-immu.test
new file mode 100644
index 0000000..4998542
--- /dev/null
+++ b/tests/test-data/check-immu.test
@@ -0,0 +1,73 @@
+[case testMypyImmu]
+# cmd: mypy test.py
+[file test.py]
+from immutables import Map
+from typing import Dict, Union, Any, cast
+
+def init() -> None:
+ def thing(m: Map[str, Union[str, int]]) -> None:
+ ...
+
+ thing(Map(foo=1))
+ thing(Map(foo='bar', baz=1))
+ thing(Map([('foo', 'bar'), ('bar', 1)]))
+ thing(Map(Map(foo=1), bar='foo'))
+ m = Map({1: 2})
+ thing(m) # E: Argument 1 to "thing" has incompatible type "Map[int, int]"; expected "Map[str, Union[str, int]]"
+
+def assignments() -> None:
+ m_int__str = Map[int, str]()
+ m_str__str = Map[str, str]()
+ m_int_str__str = Map[Union[int, str], str]()
+ m_str__int_str = Map[str, Union[int, str]]()
+
+ m_int__str = m_str__str # E: Incompatible types in assignment (expression has type "Map[str, str]", variable has type "Map[int, str]")
+ m_int__str = m_int_str__str # E: Incompatible types in assignment (expression has type "Map[Union[int, str], str]", variable has type "Map[int, str]")
+ m_int__str = m_str__int_str # E: Incompatible types in assignment (expression has type "Map[str, Union[int, str]]", variable has type "Map[int, str]")
+
+ m_str__str = m_int__str # E: Incompatible types in assignment (expression has type "Map[int, str]", variable has type "Map[str, str]")
+ m_str__str = m_int_str__str # E: Incompatible types in assignment (expression has type "Map[Union[int, str], str]", variable has type "Map[str, str]")
+ m_str__str = m_str__int_str # E: Incompatible types in assignment (expression has type "Map[str, Union[int, str]]", variable has type "Map[str, str]")
+
+ m_int_str__str = m_int__str # E: Incompatible types in assignment (expression has type "Map[int, str]", variable has type "Map[Union[int, str], str]")
+ m_int_str__str = m_str__str # E: Incompatible types in assignment (expression has type "Map[str, str]", variable has type "Map[Union[int, str], str]")
+ m_int_str__str = m_str__int_str # E: Incompatible types in assignment (expression has type "Map[str, Union[int, str]]", variable has type "Map[Union[int, str], str]")
+
+ m_str__int_str = m_int__str # E: Incompatible types in assignment (expression has type "Map[int, str]", variable has type "Map[str, Union[int, str]]")
+ m_str__int_str = m_int_str__str # E: Incompatible types in assignment (expression has type "Map[Union[int, str], str]", variable has type "Map[str, Union[int, str]]")
+ m_str__int_str = m_str__str
+
+def update() -> None:
+ m_int__str: Map[int, str] = Map()
+ m_str__str: Map[str, str] = Map()
+ m_int_str__str: Map[Union[int, str], str] = Map()
+ m_str__int_str: Map[str, Union[int, str]] = Map()
+
+ m_int__str.update({1: '2'})
+ m_int__str.update({1: '2'}, three='4') # E: Unexpected keyword argument "three" for "update" of "Map"
+ m_int__str.update({1: 2}) # E: Argument 1 to "update" of "Map" has incompatible type "Dict[int, int]"; expected "Union[IterableItems[int, str], Iterable[Tuple[int, str]]]"
+
+ m_str__str.update({'1': '2'})
+ m_str__str.update({'1': '2'}, three='4')
+ m_str__str.update({'1': 2}) # E: Argument 1 to "update" of "Map" has incompatible type "Dict[str, int]"; expected "Union[IterableItems[str, str], Iterable[Tuple[str, str]]]"
+
+ m_int_str__str.update(cast(Dict[Union[int, str], str], {1: '2', '3': '4'}))
+ m_int_str__str.update({1: '2'}, three='4')
+ m_int_str__str.update({'1': 2}) # E: Argument 1 to "update" of "Map" has incompatible type "Dict[str, int]"; expected "Union[IterableItems[Union[int, str], str], Iterable[Tuple[Union[int, str], str]]]"
+
+ m_str__int_str.update({'1': 2, '2': 3})
+ m_str__int_str.update({'1': 2, '2': 3}, four='5')
+ m_str__int_str.update({1: 2}) # E: Argument 1 to "update" of "Map" has incompatible type "Dict[int, int]"; expected "Union[IterableItems[str, Union[int, str]], Iterable[Tuple[str, Union[int, str]]]]"
+
+def mutate() -> None:
+ m = Map[str, str]()
+
+ with m.mutate() as mm:
+ mm[0] = '1' # E: Invalid index type "int" for "MapMutation[str, str]"; expected type "str"
+ mm['1'] = 0 # E: Incompatible types in assignment (expression has type "int", target has type "str")
+ mm['1'] = '2'
+ del mm['1']
+ mm.set('3', '4')
+ m2 = mm.finish()
+
+ reveal_type(m2) # N: Revealed type is "immutables._map.Map[builtins.str*, builtins.str*]"
diff --git a/tests/test_issue24.py b/tests/test_issue24.py
new file mode 100644
index 0000000..7d51e34
--- /dev/null
+++ b/tests/test_issue24.py
@@ -0,0 +1,156 @@
+import unittest
+
+from immutables.map import Map as PyMap, map_bitcount
+
+
class CollisionKey:
    """Test key whose hash is always 0.

    Any two instances collide in a hash-based container while remaining
    unequal (identity-based equality is inherited from object), forcing
    the map into its collision-node code paths.
    """

    def __hash__(self):
        return 0
+
+
class Issue24Base:
    """Regression tests for issue #24: trie node sizes/bitmaps must stay
    consistent after in-place (mutation) updates and deletes.

    Subclasses set ``Map`` to the implementation under test.
    """

    # Set by concrete subclasses (pure-Python or C Map).
    Map = None

    def test_issue24(self):
        """Deleting every key through a mutation must reach length 0."""
        keys = range(27)
        new_entries = dict.fromkeys(keys, True)
        m = self.Map(new_entries)
        self.assertTrue(17 in m)
        with m.mutate() as mm:
            for i in keys:
                del mm[i]
            self.assertEqual(len(mm), 0)

    # --- helpers that assert against Map.__dump__() header lines ---

    def dump_check_node_kind(self, header, kind):
        # *header* is one node line of __dump__ output,
        # e.g. "BitmapNode(size=14 ...)".
        header = header.strip()
        self.assertTrue(header.strip().startswith(kind))

    def dump_check_node_size(self, header, size):
        # Parse the "size=N" token out of a node header line.
        node_size = header.split('size=', 1)[1]
        node_size = int(node_size.split(maxsplit=1)[0])
        self.assertEqual(node_size, size)

    def dump_check_bitmap_count(self, header, count):
        # Parse "bitmap=0x..." and compare its popcount with *count*.
        header = header.split('bitmap=')[1]
        bitmap = int(header.split(maxsplit=1)[0], 0)
        self.assertEqual(map_bitcount(bitmap), count)

    def dump_check_bitmap_node_count(self, header, count):
        # A bitmap node with *count* entries has 2*count array slots and
        # *count* bits set in its bitmap.
        self.dump_check_node_kind(header, 'Bitmap')
        self.dump_check_node_size(header, count * 2)
        self.dump_check_bitmap_count(header, count)

    def dump_check_collision_node_count(self, header, count):
        self.dump_check_node_kind(header, 'Collision')
        self.dump_check_node_size(header, 2 * count)

    def test_bitmap_node_update_in_place_count(self):
        """Bitmap node size stays correct after building via update."""
        keys = range(7)
        new_entries = dict.fromkeys(keys, True)
        m = self.Map(new_entries)
        d = m.__dump__().splitlines()
        self.assertTrue(d)
        if d[0].startswith('HAMT'):
            header = d[1] # skip _map.Map.__dump__() header
        else:
            header = d[0]
        self.dump_check_bitmap_node_count(header, 7)

    def test_bitmap_node_delete_in_place_count(self):
        """Bitmap node size stays correct after in-place deletes."""
        keys = range(7)
        new_entries = dict.fromkeys(keys, True)
        m = self.Map(new_entries)
        with m.mutate() as mm:
            del mm[0], mm[2], mm[3]
            m2 = mm.finish()
        d = m2.__dump__().splitlines()
        self.assertTrue(d)
        if d[0].startswith('HAMT'):
            header = d[1] # skip _map.Map.__dump__() header
        else:
            header = d[0]
        self.dump_check_bitmap_node_count(header, 4)

    def test_collision_node_update_in_place_count(self):
        """Collision node size stays correct after building via update."""
        keys = (CollisionKey() for i in range(7))
        new_entries = dict.fromkeys(keys, True)
        m = self.Map(new_entries)
        d = m.__dump__().splitlines()
        self.assertTrue(len(d) > 3)
        # get node headers
        if d[0].startswith('HAMT'):
            h1, h2 = d[1], d[3] # skip _map.Map.__dump__() header
        else:
            h1, h2 = d[0], d[2]
        self.dump_check_node_kind(h1, 'Bitmap')
        self.dump_check_collision_node_count(h2, 7)

    def test_collision_node_delete_in_place_count(self):
        """Collision node size stays correct after in-place deletes."""
        keys = [CollisionKey() for i in range(7)]
        new_entries = dict.fromkeys(keys, True)
        m = self.Map(new_entries)
        with m.mutate() as mm:
            del mm[keys[0]], mm[keys[2]], mm[keys[3]]
            m2 = mm.finish()
        d = m2.__dump__().splitlines()
        self.assertTrue(len(d) > 3)
        # get node headers
        if d[0].startswith('HAMT'):
            h1, h2 = d[1], d[3] # skip _map.Map.__dump__() header
        else:
            h1, h2 = d[0], d[2]
        self.dump_check_node_kind(h1, 'Bitmap')
        self.dump_check_collision_node_count(h2, 4)
+
+
+try:
+ from immutables._map import Map as CMap
+except ImportError:
+ CMap = None
+
+
class Issue24PyTest(Issue24Base, unittest.TestCase):
    # Run the issue-24 regression suite against the pure-Python Map.
    Map = PyMap
+
+
+@unittest.skipIf(CMap is None, 'C Map is not available')
+class Issue24CTest(Issue24Base, unittest.TestCase):
+ Map = CMap
+
+ def hamt_dump_check_first_return_second(self, m):
+ d = m.__dump__().splitlines()
+ self.assertTrue(len(d) > 2)
+ self.assertTrue(d[0].startswith('HAMT'))
+ return d[1]
+
+ def test_array_node_update_in_place_count(self):
+ keys = range(27)
+ new_entries = dict.fromkeys(keys, True)
+ m = self.Map(new_entries)
+ header = self.hamt_dump_check_first_return_second(m)
+ self.dump_check_node_kind(header, 'Array')
+ for i in range(2, 18):
+ m = m.delete(i)
+ header = self.hamt_dump_check_first_return_second(m)
+ self.dump_check_bitmap_node_count(header, 11)
+
+ def test_array_node_delete_in_place_count(self):
+ keys = range(27)
+ new_entries = dict.fromkeys(keys, True)
+ m = self.Map(new_entries)
+ header = self.hamt_dump_check_first_return_second(m)
+ self.dump_check_node_kind(header, 'Array')
+ with m.mutate() as mm:
+ for i in range(5):
+ del mm[i]
+ m2 = mm.finish()
+ header = self.hamt_dump_check_first_return_second(m2)
+ self.dump_check_node_kind(header, 'Array')
+ for i in range(6, 17):
+ m2 = m2.delete(i)
+ header = self.hamt_dump_check_first_return_second(m2)
+ self.dump_check_bitmap_node_count(header, 11)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tests/test_map.py b/tests/test_map.py
new file mode 100644
index 0000000..4640029
--- /dev/null
+++ b/tests/test_map.py
@@ -0,0 +1,1417 @@
+import collections.abc
+import gc
+import pickle
+import random
+import sys
+import unittest
+import weakref
+
+from immutables.map import Map as PyMap
+from immutables._testutils import EqError
+from immutables._testutils import HashKey
+from immutables._testutils import HashKeyCrasher
+from immutables._testutils import HashingError
+from immutables._testutils import KeyStr
+from immutables._testutils import ReprError
+
+
+class BaseMapTest:
+
+ Map = None
+
+ def test_hashkey_helper_1(self):
+ k1 = HashKey(10, 'aaa')
+ k2 = HashKey(10, 'bbb')
+
+ self.assertNotEqual(k1, k2)
+ self.assertEqual(hash(k1), hash(k2))
+
+ d = dict()
+ d[k1] = 'a'
+ d[k2] = 'b'
+
+ self.assertEqual(d[k1], 'a')
+ self.assertEqual(d[k2], 'b')
+
+ def test_map_basics_1(self):
+ h = self.Map()
+ h = None # NoQA
+
+ def test_map_basics_2(self):
+ h = self.Map()
+ self.assertEqual(len(h), 0)
+
+ h2 = h.set('a', 'b')
+ self.assertIsNot(h, h2)
+ self.assertEqual(len(h), 0)
+ self.assertEqual(len(h2), 1)
+
+ self.assertIsNone(h.get('a'))
+ self.assertEqual(h.get('a', 42), 42)
+
+ self.assertEqual(h2.get('a'), 'b')
+
+ h3 = h2.set('b', 10)
+ self.assertIsNot(h2, h3)
+ self.assertEqual(len(h), 0)
+ self.assertEqual(len(h2), 1)
+ self.assertEqual(len(h3), 2)
+ self.assertEqual(h3.get('a'), 'b')
+ self.assertEqual(h3.get('b'), 10)
+
+ self.assertIsNone(h.get('b'))
+ self.assertIsNone(h2.get('b'))
+
+ self.assertIsNone(h.get('a'))
+ self.assertEqual(h2.get('a'), 'b')
+
+ h = h2 = h3 = None
+
+ def test_map_basics_3(self):
+ h = self.Map()
+ o = object()
+ h1 = h.set('1', o)
+ h2 = h1.set('1', o)
+ self.assertIs(h1, h2)
+
+ def test_map_basics_4(self):
+ h = self.Map()
+ h1 = h.set('key', [])
+ h2 = h1.set('key', [])
+ self.assertIsNot(h1, h2)
+ self.assertEqual(len(h1), 1)
+ self.assertEqual(len(h2), 1)
+ self.assertIsNot(h1.get('key'), h2.get('key'))
+
+ def test_map_collision_1(self):
+ k1 = HashKey(10, 'aaa')
+ k2 = HashKey(10, 'bbb')
+ k3 = HashKey(10, 'ccc')
+
+ h = self.Map()
+ h2 = h.set(k1, 'a')
+ h3 = h2.set(k2, 'b')
+
+ self.assertEqual(h.get(k1), None)
+ self.assertEqual(h.get(k2), None)
+
+ self.assertEqual(h2.get(k1), 'a')
+ self.assertEqual(h2.get(k2), None)
+
+ self.assertEqual(h3.get(k1), 'a')
+ self.assertEqual(h3.get(k2), 'b')
+
+ h4 = h3.set(k2, 'cc')
+ h5 = h4.set(k3, 'aa')
+
+ self.assertEqual(h3.get(k1), 'a')
+ self.assertEqual(h3.get(k2), 'b')
+ self.assertEqual(h4.get(k1), 'a')
+ self.assertEqual(h4.get(k2), 'cc')
+ self.assertEqual(h4.get(k3), None)
+ self.assertEqual(h5.get(k1), 'a')
+ self.assertEqual(h5.get(k2), 'cc')
+ self.assertEqual(h5.get(k2), 'cc')
+ self.assertEqual(h5.get(k3), 'aa')
+
+ self.assertEqual(len(h), 0)
+ self.assertEqual(len(h2), 1)
+ self.assertEqual(len(h3), 2)
+ self.assertEqual(len(h4), 2)
+ self.assertEqual(len(h5), 3)
+
+ def test_map_collision_2(self):
+ A = HashKey(100, 'A')
+ B = HashKey(101, 'B')
+ C = HashKey(0b011000011100000100, 'C')
+ D = HashKey(0b011000011100000100, 'D')
+ E = HashKey(0b1011000011100000100, 'E')
+
+ h = self.Map()
+ h = h.set(A, 'a')
+ h = h.set(B, 'b')
+ h = h.set(C, 'c')
+ h = h.set(D, 'd')
+
+ # BitmapNode(size=6 bitmap=0b100110000):
+ # NULL:
+ # BitmapNode(size=4 bitmap=0b1000000000000000000001000):
+ # <Key name:A hash:100>: 'a'
+ # NULL:
+ # CollisionNode(size=4 id=0x108572410):
+ # <Key name:C hash:100100>: 'c'
+ # <Key name:D hash:100100>: 'd'
+ # <Key name:B hash:101>: 'b'
+
+ h = h.set(E, 'e')
+
+ # BitmapNode(size=4 count=2.0 bitmap=0b110000 id=10b8ea5c0):
+ # None:
+ # BitmapNode(size=4 count=2.0
+ # bitmap=0b1000000000000000000001000 id=10b8ea518):
+ # <Key name:A hash:100>: 'a'
+ # None:
+ # BitmapNode(size=2 count=1.0 bitmap=0b10
+ # id=10b8ea4a8):
+ # None:
+ # BitmapNode(size=4 count=2.0
+ # bitmap=0b100000001000
+ # id=10b8ea4e0):
+ # None:
+ # CollisionNode(size=4 id=10b8ea470):
+ # <Key name:C hash:100100>: 'c'
+ # <Key name:D hash:100100>: 'd'
+ # <Key name:E hash:362244>: 'e'
+ # <Key name:B hash:101>: 'b'
+
+ def test_map_stress_01(self):
+ COLLECTION_SIZE = 7000
+ TEST_ITERS_EVERY = 647
+ CRASH_HASH_EVERY = 97
+ CRASH_EQ_EVERY = 11
+ RUN_XTIMES = 3
+
+ for _ in range(RUN_XTIMES):
+ h = self.Map()
+ d = dict()
+
+ for i in range(COLLECTION_SIZE):
+ key = KeyStr(i)
+
+ if not (i % CRASH_HASH_EVERY):
+ with HashKeyCrasher(error_on_hash=True):
+ with self.assertRaises(HashingError):
+ h.set(key, i)
+
+ h = h.set(key, i)
+
+ if not (i % CRASH_EQ_EVERY):
+ with HashKeyCrasher(error_on_eq=True):
+ with self.assertRaises(EqError):
+ h.get(KeyStr(i)) # really trigger __eq__
+
+ d[key] = i
+ self.assertEqual(len(d), len(h))
+
+ if not (i % TEST_ITERS_EVERY):
+ self.assertEqual(set(h.items()), set(d.items()))
+ self.assertEqual(len(h.items()), len(d.items()))
+
+ self.assertEqual(len(h), COLLECTION_SIZE)
+
+ for key in range(COLLECTION_SIZE):
+ self.assertEqual(h.get(KeyStr(key), 'not found'), key)
+
+ keys_to_delete = list(range(COLLECTION_SIZE))
+ random.shuffle(keys_to_delete)
+ for iter_i, i in enumerate(keys_to_delete):
+ key = KeyStr(i)
+
+ if not (iter_i % CRASH_HASH_EVERY):
+ with HashKeyCrasher(error_on_hash=True):
+ with self.assertRaises(HashingError):
+ h.delete(key)
+
+ if not (iter_i % CRASH_EQ_EVERY):
+ with HashKeyCrasher(error_on_eq=True):
+ with self.assertRaises(EqError):
+ h.delete(KeyStr(i))
+
+ h = h.delete(key)
+ self.assertEqual(h.get(key, 'not found'), 'not found')
+ del d[key]
+ self.assertEqual(len(d), len(h))
+
+ if iter_i == COLLECTION_SIZE // 2:
+ hm = h
+ dm = d.copy()
+
+ if not (iter_i % TEST_ITERS_EVERY):
+ self.assertEqual(set(h.keys()), set(d.keys()))
+ self.assertEqual(len(h.keys()), len(d.keys()))
+
+ self.assertEqual(len(d), 0)
+ self.assertEqual(len(h), 0)
+
+ # ============
+
+ for key in dm:
+ self.assertEqual(hm.get(str(key)), dm[key])
+ self.assertEqual(len(dm), len(hm))
+
+ for i, key in enumerate(keys_to_delete):
+ if str(key) in dm:
+ hm = hm.delete(str(key))
+ dm.pop(str(key))
+ self.assertEqual(hm.get(str(key), 'not found'), 'not found')
+ self.assertEqual(len(d), len(h))
+
+ if not (i % TEST_ITERS_EVERY):
+ self.assertEqual(set(h.values()), set(d.values()))
+ self.assertEqual(len(h.values()), len(d.values()))
+
+ self.assertEqual(len(d), 0)
+ self.assertEqual(len(h), 0)
+ self.assertEqual(list(h.items()), [])
+
+ def test_map_collision_3(self):
+ # Test that iteration works with the deepest tree possible.
+
+ C = HashKey(0b10000000_00000000_00000000_00000000, 'C')
+ D = HashKey(0b10000000_00000000_00000000_00000000, 'D')
+
+ E = HashKey(0b00000000_00000000_00000000_00000000, 'E')
+
+ h = self.Map()
+ h = h.set(C, 'C')
+ h = h.set(D, 'D')
+ h = h.set(E, 'E')
+
+ # BitmapNode(size=2 count=1 bitmap=0b1):
+ # NULL:
+ # BitmapNode(size=2 count=1 bitmap=0b1):
+ # NULL:
+ # BitmapNode(size=2 count=1 bitmap=0b1):
+ # NULL:
+ # BitmapNode(size=2 count=1 bitmap=0b1):
+ # NULL:
+ # BitmapNode(size=2 count=1 bitmap=0b1):
+ # NULL:
+ # BitmapNode(size=2 count=1 bitmap=0b1):
+ # NULL:
+ # BitmapNode(size=4 count=2 bitmap=0b101):
+ # <Key name:E hash:0>: 'E'
+ # NULL:
+ # CollisionNode(size=4 id=0x107a24520):
+ # <Key name:C hash:2147483648>: 'C'
+ # <Key name:D hash:2147483648>: 'D'
+
+ self.assertEqual({k.name for k in h.keys()}, {'C', 'D', 'E'})
+
+ def test_map_stress_02(self):
+ COLLECTION_SIZE = 20000
+ TEST_ITERS_EVERY = 647
+ CRASH_HASH_EVERY = 97
+ DELETE_EVERY = 3
+ CRASH_EQ_EVERY = 11
+
+ h = self.Map()
+ d = dict()
+
+ for i in range(COLLECTION_SIZE // 2):
+ key = KeyStr(i)
+
+ if not (i % CRASH_HASH_EVERY):
+ with HashKeyCrasher(error_on_hash=True):
+ with self.assertRaises(HashingError):
+ h.set(key, i)
+
+ h = h.set(key, i)
+
+ if not (i % CRASH_EQ_EVERY):
+ with HashKeyCrasher(error_on_eq=True):
+ with self.assertRaises(EqError):
+ h.get(KeyStr(i)) # really trigger __eq__
+
+ d[key] = i
+ self.assertEqual(len(d), len(h))
+
+ if not (i % TEST_ITERS_EVERY):
+ self.assertEqual(set(h.items()), set(d.items()))
+ self.assertEqual(len(h.items()), len(d.items()))
+
+ with h.mutate() as m:
+ for i in range(COLLECTION_SIZE // 2, COLLECTION_SIZE):
+ key = KeyStr(i)
+
+ if not (i % CRASH_HASH_EVERY):
+ with HashKeyCrasher(error_on_hash=True):
+ with self.assertRaises(HashingError):
+ m[key] = i
+
+ m[key] = i
+
+ if not (i % CRASH_EQ_EVERY):
+ with HashKeyCrasher(error_on_eq=True):
+ with self.assertRaises(EqError):
+ m[KeyStr(i)]
+
+ d[key] = i
+ self.assertEqual(len(d), len(m))
+
+ if not (i % DELETE_EVERY):
+ del m[key]
+ del d[key]
+
+ self.assertEqual(len(d), len(m))
+
+ h = m.finish()
+
+ self.assertEqual(len(h), len(d))
+ self.assertEqual(set(h.items()), set(d.items()))
+
+ with h.mutate() as m:
+ for key in list(d):
+ del d[key]
+ del m[key]
+ self.assertEqual(len(m), len(d))
+ h = m.finish()
+
+ self.assertEqual(len(h), len(d))
+ self.assertEqual(set(h.items()), set(d.items()))
+
+ def test_map_delete_1(self):
+ A = HashKey(100, 'A')
+ B = HashKey(101, 'B')
+ C = HashKey(102, 'C')
+ D = HashKey(103, 'D')
+ E = HashKey(104, 'E')
+ Z = HashKey(-100, 'Z')
+
+ Er = HashKey(103, 'Er', error_on_eq_to=D)
+
+ h = self.Map()
+ h = h.set(A, 'a')
+ h = h.set(A, 'a')
+ h = h.set(B, 'b')
+ h = h.set(C, 'c')
+ h = h.set(D, 'd')
+ h = h.set(E, 'e')
+
+ orig_len = len(h)
+
+ # BitmapNode(size=10 bitmap=0b111110000 id=0x10eadc618):
+ # <Key name:A hash:100>: 'a'
+ # <Key name:B hash:101>: 'b'
+ # <Key name:C hash:102>: 'c'
+ # <Key name:D hash:103>: 'd'
+ # <Key name:E hash:104>: 'e'
+
+ h = h.delete(C)
+ self.assertEqual(len(h), orig_len - 1)
+
+ with self.assertRaisesRegex(ValueError, 'cannot compare'):
+ h.delete(Er)
+
+ h = h.delete(D)
+ self.assertEqual(len(h), orig_len - 2)
+
+ with self.assertRaises(KeyError) as ex:
+ h.delete(Z)
+ self.assertIs(ex.exception.args[0], Z)
+
+ h = h.delete(A)
+ self.assertEqual(len(h), orig_len - 3)
+
+ self.assertEqual(h.get(A, 42), 42)
+ self.assertEqual(h.get(B), 'b')
+ self.assertEqual(h.get(E), 'e')
+
+ def test_map_delete_2(self):
+ A = HashKey(100, 'A')
+ B = HashKey(201001, 'B')
+ C = HashKey(101001, 'C')
+ BLike = HashKey(201001, 'B-like')
+ D = HashKey(103, 'D')
+ E = HashKey(104, 'E')
+ Z = HashKey(-100, 'Z')
+
+ Er = HashKey(201001, 'Er', error_on_eq_to=B)
+
+ h = self.Map()
+ h = h.set(A, 'a')
+ h = h.set(B, 'b')
+ h = h.set(C, 'c')
+ h = h.set(D, 'd')
+ h = h.set(E, 'e')
+
+ h = h.set(B, 'b') # trigger branch in BitmapNode.assoc
+
+ with self.assertRaises(KeyError):
+ h.delete(BLike) # trigger branch in BitmapNode.without
+
+ orig_len = len(h)
+
+ # BitmapNode(size=8 bitmap=0b1110010000):
+ # <Key name:A hash:100>: 'a'
+ # <Key name:D hash:103>: 'd'
+ # <Key name:E hash:104>: 'e'
+ # NULL:
+ # BitmapNode(size=4 bitmap=0b100000000001000000000):
+ # <Key name:B hash:201001>: 'b'
+ # <Key name:C hash:101001>: 'c'
+
+ with self.assertRaisesRegex(ValueError, 'cannot compare'):
+ h.delete(Er)
+
+ with self.assertRaises(KeyError) as ex:
+ h.delete(Z)
+ self.assertIs(ex.exception.args[0], Z)
+ self.assertEqual(len(h), orig_len)
+
+ h = h.delete(C)
+ self.assertEqual(len(h), orig_len - 1)
+
+ h = h.delete(B)
+ self.assertEqual(len(h), orig_len - 2)
+
+ h = h.delete(A)
+ self.assertEqual(len(h), orig_len - 3)
+
+ self.assertEqual(h.get(D), 'd')
+ self.assertEqual(h.get(E), 'e')
+
+ with self.assertRaises(KeyError):
+ h = h.delete(A)
+ with self.assertRaises(KeyError):
+ h = h.delete(B)
+ h = h.delete(D)
+ h = h.delete(E)
+ self.assertEqual(len(h), 0)
+
+ def test_map_delete_3(self):
+ A = HashKey(0b00000000001100100, 'A')
+ B = HashKey(0b00000000001100101, 'B')
+
+ C = HashKey(0b11000011100000100, 'C')
+ D = HashKey(0b11000011100000100, 'D')
+ X = HashKey(0b01000011100000100, 'Z')
+ Y = HashKey(0b11000011100000100, 'Y')
+
+ E = HashKey(0b00000000001101000, 'E')
+
+ h = self.Map()
+ h = h.set(A, 'a')
+ h = h.set(B, 'b')
+ h = h.set(C, 'c')
+ h = h.set(D, 'd')
+ h = h.set(E, 'e')
+
+ self.assertEqual(len(h), 5)
+ h = h.set(C, 'c') # trigger branch in CollisionNode.assoc
+ self.assertEqual(len(h), 5)
+
+ orig_len = len(h)
+
+ with self.assertRaises(KeyError):
+ h.delete(X)
+ with self.assertRaises(KeyError):
+ h.delete(Y)
+
+ # BitmapNode(size=6 bitmap=0b100110000):
+ # NULL:
+ # BitmapNode(size=4 bitmap=0b1000000000000000000001000):
+ # <Key name:A hash:100>: 'a'
+ # NULL:
+ # CollisionNode(size=4 id=0x108572410):
+ # <Key name:C hash:100100>: 'c'
+ # <Key name:D hash:100100>: 'd'
+ # <Key name:B hash:101>: 'b'
+ # <Key name:E hash:104>: 'e'
+
+ h = h.delete(A)
+ self.assertEqual(len(h), orig_len - 1)
+
+ h = h.delete(E)
+ self.assertEqual(len(h), orig_len - 2)
+
+ self.assertEqual(h.get(C), 'c')
+ self.assertEqual(h.get(B), 'b')
+
+ h2 = h.delete(C)
+ self.assertEqual(len(h2), orig_len - 3)
+
+ h2 = h.delete(D)
+ self.assertEqual(len(h2), orig_len - 3)
+
+ self.assertEqual(len(h), orig_len - 2)
+
+ def test_map_delete_4(self):
+ A = HashKey(100, 'A')
+ B = HashKey(101, 'B')
+ C = HashKey(100100, 'C')
+ D = HashKey(100100, 'D')
+ E = HashKey(100100, 'E')
+
+ h = self.Map()
+ h = h.set(A, 'a')
+ h = h.set(B, 'b')
+ h = h.set(C, 'c')
+ h = h.set(D, 'd')
+ h = h.set(E, 'e')
+
+ orig_len = len(h)
+
+ # BitmapNode(size=4 bitmap=0b110000):
+ # NULL:
+ # BitmapNode(size=4 bitmap=0b1000000000000000000001000):
+ # <Key name:A hash:100>: 'a'
+ # NULL:
+ # CollisionNode(size=6 id=0x10515ef30):
+ # <Key name:C hash:100100>: 'c'
+ # <Key name:D hash:100100>: 'd'
+ # <Key name:E hash:100100>: 'e'
+ # <Key name:B hash:101>: 'b'
+
+ h = h.delete(D)
+ self.assertEqual(len(h), orig_len - 1)
+
+ h = h.delete(E)
+ self.assertEqual(len(h), orig_len - 2)
+
+ h = h.delete(C)
+ self.assertEqual(len(h), orig_len - 3)
+
+ h = h.delete(A)
+ self.assertEqual(len(h), orig_len - 4)
+
+ h = h.delete(B)
+ self.assertEqual(len(h), 0)
+
+ def test_map_delete_5(self):
+ h = self.Map()
+
+ keys = []
+ for i in range(17):
+ key = HashKey(i, str(i))
+ keys.append(key)
+ h = h.set(key, 'val-{}'.format(i))
+
+ collision_key16 = HashKey(16, '18')
+ h = h.set(collision_key16, 'collision')
+
+ # ArrayNode(id=0x10f8b9318):
+ # 0::
+ # BitmapNode(size=2 count=1 bitmap=0b1):
+ # <Key name:0 hash:0>: 'val-0'
+ #
+ # ... 14 more BitmapNodes ...
+ #
+ # 15::
+ # BitmapNode(size=2 count=1 bitmap=0b1):
+ # <Key name:15 hash:15>: 'val-15'
+ #
+ # 16::
+ # BitmapNode(size=2 count=1 bitmap=0b1):
+ # NULL:
+ # CollisionNode(size=4 id=0x10f2f5af8):
+ # <Key name:16 hash:16>: 'val-16'
+ # <Key name:18 hash:16>: 'collision'
+
+ self.assertEqual(len(h), 18)
+
+ h = h.delete(keys[2])
+ self.assertEqual(len(h), 17)
+
+ h = h.delete(collision_key16)
+ self.assertEqual(len(h), 16)
+ h = h.delete(keys[16])
+ self.assertEqual(len(h), 15)
+
+ h = h.delete(keys[1])
+ self.assertEqual(len(h), 14)
+ with self.assertRaises(KeyError) as ex:
+ h.delete(keys[1])
+ self.assertIs(ex.exception.args[0], keys[1])
+ self.assertEqual(len(h), 14)
+
+ for key in keys:
+ if key in h:
+ h = h.delete(key)
+ self.assertEqual(len(h), 0)
+
+ def test_map_delete_6(self):
+ h = self.Map()
+ h = h.set(1, 1)
+ h = h.delete(1)
+ self.assertEqual(len(h), 0)
+ self.assertEqual(h, self.Map())
+
+ def test_map_items_1(self):
+ A = HashKey(100, 'A')
+ B = HashKey(201001, 'B')
+ C = HashKey(101001, 'C')
+ D = HashKey(103, 'D')
+ E = HashKey(104, 'E')
+ F = HashKey(110, 'F')
+
+ h = self.Map()
+ h = h.set(A, 'a')
+ h = h.set(B, 'b')
+ h = h.set(C, 'c')
+ h = h.set(D, 'd')
+ h = h.set(E, 'e')
+ h = h.set(F, 'f')
+
+ it = h.items()
+ self.assertEqual(
+ set(list(it)),
+ {(A, 'a'), (B, 'b'), (C, 'c'), (D, 'd'), (E, 'e'), (F, 'f')})
+
+ def test_map_items_2(self):
+ A = HashKey(100, 'A')
+ B = HashKey(101, 'B')
+ C = HashKey(100100, 'C')
+ D = HashKey(100100, 'D')
+ E = HashKey(100100, 'E')
+ F = HashKey(110, 'F')
+
+ h = self.Map()
+ h = h.set(A, 'a')
+ h = h.set(B, 'b')
+ h = h.set(C, 'c')
+ h = h.set(D, 'd')
+ h = h.set(E, 'e')
+ h = h.set(F, 'f')
+
+ it = h.items()
+ self.assertEqual(
+ set(list(it)),
+ {(A, 'a'), (B, 'b'), (C, 'c'), (D, 'd'), (E, 'e'), (F, 'f')})
+
+ def test_map_items_3(self):
+ h = self.Map()
+ self.assertEqual(len(h.items()), 0)
+ self.assertEqual(list(h.items()), [])
+
+ def test_map_items_4(self):
+ h = self.Map(a=1, b=2, c=3)
+ k = h.items()
+ self.assertEqual(set(k), {('a', 1), ('b', 2), ('c', 3)})
+ self.assertEqual(set(k), {('a', 1), ('b', 2), ('c', 3)})
+
+ def test_map_keys_1(self):
+ A = HashKey(100, 'A')
+ B = HashKey(101, 'B')
+ C = HashKey(100100, 'C')
+ D = HashKey(100100, 'D')
+ E = HashKey(100100, 'E')
+ F = HashKey(110, 'F')
+
+ h = self.Map()
+ h = h.set(A, 'a')
+ h = h.set(B, 'b')
+ h = h.set(C, 'c')
+ h = h.set(D, 'd')
+ h = h.set(E, 'e')
+ h = h.set(F, 'f')
+
+ self.assertEqual(set(list(h.keys())), {A, B, C, D, E, F})
+ self.assertEqual(set(list(h)), {A, B, C, D, E, F})
+
+ def test_map_keys_2(self):
+ h = self.Map(a=1, b=2, c=3)
+ k = h.keys()
+ self.assertEqual(set(k), {'a', 'b', 'c'})
+ self.assertEqual(set(k), {'a', 'b', 'c'})
+
+ def test_map_values_1(self):
+ A = HashKey(100, 'A')
+ B = HashKey(101, 'B')
+ C = HashKey(100100, 'C')
+ D = HashKey(100100, 'D')
+ E = HashKey(100100, 'E')
+ F = HashKey(110, 'F')
+
+ h = self.Map()
+ h = h.set(A, 'a')
+ h = h.set(B, 'b')
+ h = h.set(C, 'c')
+ h = h.set(D, 'd')
+ h = h.set(E, 'e')
+ h = h.set(F, 'f')
+
+ self.assertEqual(set(list(h.values())), {'a', 'b', 'c', 'd', 'e', 'f'})
+
+ def test_map_values_2(self):
+ h = self.Map(a=1, b=2, c=3)
+ k = h.values()
+ self.assertEqual(set(k), {1, 2, 3})
+ self.assertEqual(set(k), {1, 2, 3})
+
+ def test_map_eq_1(self):
+ A = HashKey(100, 'A')
+ B = HashKey(101, 'B')
+ C = HashKey(100100, 'C')
+ D = HashKey(100100, 'D')
+ E = HashKey(120, 'E')
+
+ h1 = self.Map()
+ h1 = h1.set(A, 'a')
+ h1 = h1.set(B, 'b')
+ h1 = h1.set(C, 'c')
+ h1 = h1.set(D, 'd')
+
+ h2 = self.Map()
+ h2 = h2.set(A, 'a')
+
+ self.assertFalse(h1 == h2)
+ self.assertTrue(h1 != h2)
+
+ h2 = h2.set(B, 'b')
+ self.assertFalse(h1 == h2)
+ self.assertTrue(h1 != h2)
+
+ h2 = h2.set(C, 'c')
+ self.assertFalse(h1 == h2)
+ self.assertTrue(h1 != h2)
+
+ h2 = h2.set(D, 'd2')
+ self.assertFalse(h1 == h2)
+ self.assertTrue(h1 != h2)
+
+ h2 = h2.set(D, 'd')
+ self.assertTrue(h1 == h2)
+ self.assertFalse(h1 != h2)
+
+ h2 = h2.set(E, 'e')
+ self.assertFalse(h1 == h2)
+ self.assertTrue(h1 != h2)
+
+ h2 = h2.delete(D)
+ self.assertFalse(h1 == h2)
+ self.assertTrue(h1 != h2)
+
+ h2 = h2.set(E, 'd')
+ self.assertFalse(h1 == h2)
+ self.assertTrue(h1 != h2)
+
+ def test_map_eq_2(self):
+ A = HashKey(100, 'A')
+ Er = HashKey(100, 'Er', error_on_eq_to=A)
+
+ h1 = self.Map()
+ h1 = h1.set(A, 'a')
+
+ h2 = self.Map()
+ h2 = h2.set(Er, 'a')
+
+ with self.assertRaisesRegex(ValueError, 'cannot compare'):
+ h1 == h2
+
+ with self.assertRaisesRegex(ValueError, 'cannot compare'):
+ h1 != h2
+
+ def test_map_eq_3(self):
+ self.assertNotEqual(self.Map(), 1)
+
+ def test_map_gc_1(self):
+ A = HashKey(100, 'A')
+
+ h = self.Map()
+ h = h.set(0, 0) # empty Map node is memoized in _map.c
+ ref = weakref.ref(h)
+
+ a = []
+ a.append(a)
+ a.append(h)
+ b = []
+ a.append(b)
+ b.append(a)
+ h = h.set(A, b)
+
+ del h, a, b
+
+ gc.collect()
+ gc.collect()
+ gc.collect()
+
+ self.assertIsNone(ref())
+
+ def test_map_gc_2(self):
+ A = HashKey(100, 'A')
+
+ h = self.Map()
+ h = h.set(A, 'a')
+ h = h.set(A, h)
+
+ ref = weakref.ref(h)
+ hi = iter(h.items())
+ next(hi)
+
+ del h, hi
+
+ gc.collect()
+ gc.collect()
+ gc.collect()
+
+ self.assertIsNone(ref())
+
+ def test_map_in_1(self):
+ A = HashKey(100, 'A')
+ AA = HashKey(100, 'A')
+
+ B = HashKey(101, 'B')
+
+ h = self.Map()
+ h = h.set(A, 1)
+
+ self.assertTrue(A in h)
+ self.assertFalse(B in h)
+
+ with self.assertRaises(EqError):
+ with HashKeyCrasher(error_on_eq=True):
+ AA in h
+
+ with self.assertRaises(HashingError):
+ with HashKeyCrasher(error_on_hash=True):
+ AA in h
+
+ def test_map_getitem_1(self):
+ A = HashKey(100, 'A')
+ AA = HashKey(100, 'A')
+
+ B = HashKey(101, 'B')
+
+ h = self.Map()
+ h = h.set(A, 1)
+
+ self.assertEqual(h[A], 1)
+ self.assertEqual(h[AA], 1)
+
+ with self.assertRaises(KeyError):
+ h[B]
+
+ with self.assertRaises(EqError):
+ with HashKeyCrasher(error_on_eq=True):
+ h[AA]
+
+ with self.assertRaises(HashingError):
+ with HashKeyCrasher(error_on_hash=True):
+ h[AA]
+
+ def test_repr_1(self):
+ h = self.Map()
+ self.assertEqual(repr(h), 'immutables.Map({})')
+
+ h = h.set(1, 2).set(2, 3).set(3, 4)
+ self.assertEqual(repr(h), 'immutables.Map({1: 2, 2: 3, 3: 4})')
+
+ def test_repr_2(self):
+ h = self.Map()
+ A = HashKey(100, 'A')
+
+ with self.assertRaises(ReprError):
+ with HashKeyCrasher(error_on_repr=True):
+ repr(h.set(1, 2).set(A, 3).set(3, 4))
+
+ with self.assertRaises(ReprError):
+ with HashKeyCrasher(error_on_repr=True):
+ repr(h.set(1, 2).set(2, A).set(3, 4))
+
+ def test_repr_3(self):
+ class Key:
+ def __init__(self):
+ self.val = None
+
+ def __hash__(self):
+ return 123
+
+ def __repr__(self):
+ return repr(self.val)
+
+ h = self.Map()
+ k = Key()
+ h = h.set(k, 1)
+ k.val = h
+
+ self.assertEqual(repr(h), 'immutables.Map({{...}: 1})')
+
+ def test_hash_1(self):
+ h = self.Map()
+ self.assertNotEqual(hash(h), -1)
+ self.assertEqual(hash(h), hash(h))
+
+ h = h.set(1, 2).set('a', 'b')
+ self.assertNotEqual(hash(h), -1)
+ self.assertEqual(hash(h), hash(h))
+
+ self.assertEqual(
+ hash(h.set(1, 2).set('a', 'b')),
+ hash(h.set('a', 'b').set(1, 2)))
+
+ def test_hash_2(self):
+ h = self.Map()
+ A = HashKey(100, 'A')
+
+ m = h.set(1, 2).set(A, 3).set(3, 4)
+ with self.assertRaises(HashingError):
+ with HashKeyCrasher(error_on_hash=True):
+ hash(m)
+
+ m = h.set(1, 2).set(2, A).set(3, 4)
+ with self.assertRaises(HashingError):
+ with HashKeyCrasher(error_on_hash=True):
+ hash(m)
+
+ def test_abc_1(self):
+ self.assertTrue(issubclass(self.Map, collections.abc.Mapping))
+
+ def test_map_mut_1(self):
+ h = self.Map()
+ h = h.set('a', 1)
+
+ hm1 = h.mutate()
+ hm2 = h.mutate()
+
+ self.assertFalse(isinstance(hm1, self.Map))
+
+ self.assertIsNot(hm1, hm2)
+ self.assertEqual(hm1['a'], 1)
+ self.assertEqual(hm2['a'], 1)
+
+ hm1.set('b', 2)
+ hm1.set('c', 3)
+
+ hm2.set('x', 100)
+ hm2.set('a', 1000)
+
+ self.assertEqual(hm1['a'], 1)
+ self.assertEqual(hm1.get('x', -1), -1)
+
+ self.assertEqual(hm2['a'], 1000)
+ self.assertTrue('x' in hm2)
+
+ h1 = hm1.finish()
+ h2 = hm2.finish()
+
+ self.assertTrue(isinstance(h1, self.Map))
+
+ self.assertEqual(dict(h.items()), {'a': 1})
+ self.assertEqual(dict(h1.items()), {'a': 1, 'b': 2, 'c': 3})
+ self.assertEqual(dict(h2.items()), {'a': 1000, 'x': 100})
+
+ def test_map_mut_2(self):
+ h = self.Map()
+ h = h.set('a', 1)
+
+ hm1 = h.mutate()
+ hm1.set('a', 2)
+ hm1.set('a', 3)
+ hm1.set('a', 4)
+ h2 = hm1.finish()
+
+ self.assertEqual(dict(h.items()), {'a': 1})
+ self.assertEqual(dict(h2.items()), {'a': 4})
+
+ def test_map_mut_3(self):
+ h = self.Map()
+ h = h.set('a', 1)
+ hm1 = h.mutate()
+
+ self.assertEqual(repr(hm1), "immutables.MapMutation({'a': 1})")
+
+ with self.assertRaisesRegex(TypeError, 'unhashable type'):
+ hash(hm1)
+
+ def test_map_mut_4(self):
+ h = self.Map()
+ h = h.set('a', 1)
+ h = h.set('b', 2)
+
+ hm1 = h.mutate()
+ hm2 = h.mutate()
+
+ self.assertEqual(hm1, hm2)
+
+ hm1.set('a', 10)
+ self.assertNotEqual(hm1, hm2)
+
+ hm2.set('a', 10)
+ self.assertEqual(hm1, hm2)
+
+ self.assertEqual(hm2.pop('a'), 10)
+ self.assertNotEqual(hm1, hm2)
+
+ def test_map_mut_5(self):
+ h = self.Map({'a': 1, 'b': 2}, z=100)
+ self.assertTrue(isinstance(h, self.Map))
+ self.assertEqual(dict(h.items()), {'a': 1, 'b': 2, 'z': 100})
+
+ h2 = h.update(z=200, y=-1)
+ self.assertEqual(dict(h.items()), {'a': 1, 'b': 2, 'z': 100})
+ self.assertEqual(dict(h2.items()), {'a': 1, 'b': 2, 'z': 200, 'y': -1})
+
+ h3 = h2.update([(1, 2), (3, 4)])
+ self.assertEqual(dict(h.items()), {'a': 1, 'b': 2, 'z': 100})
+ self.assertEqual(dict(h2.items()), {'a': 1, 'b': 2, 'z': 200, 'y': -1})
+ self.assertEqual(dict(h3.items()),
+ {'a': 1, 'b': 2, 'z': 200, 'y': -1, 1: 2, 3: 4})
+
+ h4 = h3.update()
+ self.assertIs(h4, h3)
+
+ h5 = h4.update(self.Map({'zzz': 'yyz'}))
+
+ self.assertEqual(dict(h5.items()),
+ {'a': 1, 'b': 2, 'z': 200, 'y': -1, 1: 2, 3: 4,
+ 'zzz': 'yyz'})
+
+ def test_map_mut_6(self):
+ h = self.Map({'a': 1, 'b': 2}, z=100)
+ self.assertEqual(dict(h.items()), {'a': 1, 'b': 2, 'z': 100})
+
+ with self.assertRaisesRegex(TypeError, 'not iterable'):
+ h.update(1)
+
+ with self.assertRaisesRegex(ValueError, 'map update sequence element'):
+ h.update([(1, 2), (3, 4, 5)])
+
+ with self.assertRaisesRegex(TypeError, 'cannot convert map update'):
+ h.update([(1, 2), 1])
+
+ self.assertEqual(dict(h.items()), {'a': 1, 'b': 2, 'z': 100})
+
+ def test_map_mut_7(self):
+ key = HashKey(123, 'aaa')
+
+ h = self.Map({'a': 1, 'b': 2}, z=100)
+ self.assertEqual(dict(h.items()), {'a': 1, 'b': 2, 'z': 100})
+
+ upd = {key: 1}
+ with HashKeyCrasher(error_on_hash=True):
+ with self.assertRaises(HashingError):
+ h.update(upd)
+
+ upd = self.Map({key: 'zzz'})
+ with HashKeyCrasher(error_on_hash=True):
+ with self.assertRaises(HashingError):
+ h.update(upd)
+
+ upd = [(1, 2), (key, 'zzz')]
+ with HashKeyCrasher(error_on_hash=True):
+ with self.assertRaises(HashingError):
+ h.update(upd)
+
+ self.assertEqual(dict(h.items()), {'a': 1, 'b': 2, 'z': 100})
+
+ def test_map_mut_8(self):
+ key1 = HashKey(123, 'aaa')
+ key2 = HashKey(123, 'bbb')
+
+ h = self.Map({key1: 123})
+ self.assertEqual(dict(h.items()), {key1: 123})
+
+ upd = {key2: 1}
+ with HashKeyCrasher(error_on_eq=True):
+ with self.assertRaises(EqError):
+ h.update(upd)
+
+ upd = self.Map({key2: 'zzz'})
+ with HashKeyCrasher(error_on_eq=True):
+ with self.assertRaises(EqError):
+ h.update(upd)
+
+ upd = [(1, 2), (key2, 'zzz')]
+ with HashKeyCrasher(error_on_eq=True):
+ with self.assertRaises(EqError):
+ h.update(upd)
+
+ self.assertEqual(dict(h.items()), {key1: 123})
+
+ def test_map_mut_9(self):
+ key1 = HashKey(123, 'aaa')
+
+ src = {key1: 123}
+ with HashKeyCrasher(error_on_hash=True):
+ with self.assertRaises(HashingError):
+ self.Map(src)
+
+ src = [(1, 2), (key1, 123)]
+ with HashKeyCrasher(error_on_hash=True):
+ with self.assertRaises(HashingError):
+ self.Map(src)
+
+ def test_map_mut_10(self):
+ key1 = HashKey(123, 'aaa')
+
+ m = self.Map({key1: 123})
+
+ mm = m.mutate()
+ with HashKeyCrasher(error_on_hash=True):
+ with self.assertRaises(HashingError):
+ del mm[key1]
+
+ mm = m.mutate()
+ with HashKeyCrasher(error_on_hash=True):
+ with self.assertRaises(HashingError):
+ mm.pop(key1, None)
+
+ mm = m.mutate()
+ with HashKeyCrasher(error_on_hash=True):
+ with self.assertRaises(HashingError):
+ mm.set(key1, 123)
+
+ def test_map_mut_11(self):
+ m = self.Map({'a': 1, 'b': 2})
+
+ mm = m.mutate()
+ self.assertEqual(mm.pop('a', 1), 1)
+ self.assertEqual(mm.finish(), self.Map({'b': 2}))
+
+ mm = m.mutate()
+ self.assertEqual(mm.pop('b', 1), 2)
+ self.assertEqual(mm.finish(), self.Map({'a': 1}))
+
+ mm = m.mutate()
+ self.assertEqual(mm.pop('b', 1), 2)
+ del mm['a']
+ self.assertEqual(mm.finish(), self.Map())
+
+ def test_map_mut_12(self):
+ m = self.Map({'a': 1, 'b': 2})
+
+ mm = m.mutate()
+ mm.finish()
+
+ with self.assertRaisesRegex(ValueError, 'has been finished'):
+ mm.pop('a')
+
+ with self.assertRaisesRegex(ValueError, 'has been finished'):
+ del mm['a']
+
+ with self.assertRaisesRegex(ValueError, 'has been finished'):
+ mm.set('a', 'b')
+
+ with self.assertRaisesRegex(ValueError, 'has been finished'):
+ mm['a'] = 'b'
+
+ with self.assertRaisesRegex(ValueError, 'has been finished'):
+ mm.update(a='b')
+
+ def test_map_mut_13(self):
+ key1 = HashKey(123, 'aaa')
+ key2 = HashKey(123, 'aaa')
+
+ m = self.Map({key1: 123})
+
+ mm = m.mutate()
+ with HashKeyCrasher(error_on_eq=True):
+ with self.assertRaises(EqError):
+ del mm[key2]
+
+ mm = m.mutate()
+ with HashKeyCrasher(error_on_eq=True):
+ with self.assertRaises(EqError):
+ mm.pop(key2, None)
+
+ mm = m.mutate()
+ with HashKeyCrasher(error_on_eq=True):
+ with self.assertRaises(EqError):
+ mm.set(key2, 123)
+
+ def test_map_mut_14(self):
+ m = self.Map(a=1, b=2)
+
+ with m.mutate() as mm:
+ mm['z'] = 100
+ del mm['a']
+
+ self.assertEqual(mm.finish(), self.Map(z=100, b=2))
+
+ def test_map_mut_15(self):
+ m = self.Map(a=1, b=2)
+
+ with self.assertRaises(ZeroDivisionError):
+ with m.mutate() as mm:
+ mm['z'] = 100
+ del mm['a']
+ 1 / 0
+
+ self.assertEqual(mm.finish(), self.Map(z=100, b=2))
+ self.assertEqual(m, self.Map(a=1, b=2))
+
+ def test_map_mut_16(self):
+ m = self.Map(a=1, b=2)
+ hash(m)
+
+ m2 = self.Map(m)
+ m3 = self.Map(m, c=3)
+
+ self.assertEqual(m, m2)
+ self.assertEqual(len(m), len(m2))
+ self.assertEqual(hash(m), hash(m2))
+
+ self.assertIsNot(m, m2)
+ self.assertEqual(m3, self.Map(a=1, b=2, c=3))
+
+ def test_map_mut_17(self):
+ m = self.Map(a=1)
+ with m.mutate() as mm:
+ with self.assertRaisesRegex(
+ TypeError, 'cannot create Maps from MapMutations'):
+ self.Map(mm)
+
+ def test_map_mut_18(self):
+ m = self.Map(a=1, b=2)
+ with m.mutate() as mm:
+ mm.update(self.Map(x=1), z=2)
+ mm.update(c=3)
+ mm.update({'n': 100, 'a': 20})
+ m2 = mm.finish()
+
+ expected = self.Map(
+ {'b': 2, 'c': 3, 'n': 100, 'z': 2, 'x': 1, 'a': 20})
+
+ self.assertEqual(len(m2), 6)
+ self.assertEqual(m2, expected)
+ self.assertEqual(m, self.Map(a=1, b=2))
+
+ def test_map_mut_19(self):
+ m = self.Map(a=1, b=2)
+ m2 = m.update({'a': 20})
+ self.assertEqual(len(m2), 2)
+
+ def test_map_mut_20(self):
+ # Issue 24:
+
+ h = self.Map()
+
+ for i in range(19):
+ # Create more than 16 keys to trigger the root bitmap
+ # node to be converted into an array node
+ h = h.set(HashKey(i, i), i)
+
+ h = h.set(HashKey(18, '18-collision'), 18)
+
+ with h.mutate() as m:
+ del m[HashKey(18, 18)]
+ del m[HashKey(18, '18-collision')]
+
+ # The pre-issue-24 code failed to update the number of array
+ # node element, so at this point it would be greater than it
+ # actually is.
+ h = m.finish()
+
+ # Any of the below operations shouldn't crash the debug build.
+ with h.mutate() as m:
+ for i in range(18):
+ del m[HashKey(i, i)]
+ h = m.finish()
+ h = h.set(HashKey(21, 21), 21)
+ h = h.set(HashKey(22, 22), 22)
+
+ def test_map_mut_21(self):
+ # Issue 24:
+ # Array nodes, while in mutation, failed to increment the
+ # internal count of elements when adding a new key to it.
+ # Because the internal count
+
+ h = self.Map()
+
+ for i in range(18):
+ # Create more than 16 keys to trigger the root bitmap
+ # node to be converted into an array node
+ h = h.set(HashKey(i, i), i)
+
+ with h.mutate() as m:
+ # Add one new key to the array node
+ m[HashKey(18, 18)] = 18
+ # Add another key -- after this the old code failed
+ # to increment the number of elements in the mutated
+ # array node.
+ m[HashKey(19, 19)] = 19
+ h = m.finish()
+
+ for i in range(20):
+ # Start deleting keys one by one. Because array node
+ # element count was accounted incorrectly (smaller by 1
+ # than it actually is, the mutation for "del h[18]" would
+ # create an empty array node, clipping the "19" key).
+ # Before the issue #24 fix, the below line would crash
+ # on i=19.
+ h = h.delete(HashKey(i, i))
+
+ def test_map_mut_stress(self):
+ COLLECTION_SIZE = 7000
+ TEST_ITERS_EVERY = 647
+ RUN_XTIMES = 3
+
+ for _ in range(RUN_XTIMES):
+ h = self.Map()
+ d = dict()
+
+ for i in range(COLLECTION_SIZE // TEST_ITERS_EVERY):
+
+ hm = h.mutate()
+ for j in range(TEST_ITERS_EVERY):
+ key = random.randint(1, 100000)
+ key = HashKey(key % 271, str(key))
+
+ hm.set(key, key)
+ d[key] = key
+
+ self.assertEqual(len(hm), len(d))
+
+ h2 = hm.finish()
+ self.assertEqual(dict(h2.items()), d)
+ h = h2
+
+ self.assertEqual(dict(h.items()), d)
+ self.assertEqual(len(h), len(d))
+
+ it = iter(tuple(d.keys()))
+ for i in range(COLLECTION_SIZE // TEST_ITERS_EVERY):
+
+ hm = h.mutate()
+ for j in range(TEST_ITERS_EVERY):
+ try:
+ key = next(it)
+ except StopIteration:
+ break
+
+ del d[key]
+ del hm[key]
+
+ self.assertEqual(len(hm), len(d))
+
+ h2 = hm.finish()
+ self.assertEqual(dict(h2.items()), d)
+ h = h2
+
+ self.assertEqual(dict(h.items()), d)
+ self.assertEqual(len(h), len(d))
+
+ def test_map_pickle(self):
+ h = self.Map(a=1, b=2)
+ for proto in range(pickle.HIGHEST_PROTOCOL):
+ p = pickle.dumps(h, proto)
+ uh = pickle.loads(p)
+
+ self.assertTrue(isinstance(uh, self.Map))
+ self.assertEqual(h, uh)
+
+ with self.assertRaisesRegex(TypeError, "can('t|not) pickle"):
+ pickle.dumps(h.mutate())
+
+ @unittest.skipIf(
+ sys.version_info < (3, 7, 0), "__class_getitem__ is not available"
+ )
+ def test_map_is_subscriptable(self):
+ self.assertIs(self.Map[int, str], self.Map)
+
+ def test_kwarg_named_col(self):
+ self.assertEqual(dict(self.Map(col=0)), {"col": 0})
+ self.assertEqual(dict(self.Map(a=0, col=1)), {"a": 0, "col": 1})
+ self.assertEqual(dict(self.Map({"a": 0}, col=1)), {"a": 0, "col": 1})
+
+
+class PyMapTest(BaseMapTest, unittest.TestCase):
+
+ Map = PyMap
+
+
+try:
+ from immutables._map import Map as CMap
+except ImportError:
+ CMap = None
+
+
+@unittest.skipIf(CMap is None, 'C Map is not available')
+class CMapTest(BaseMapTest, unittest.TestCase):
+
+ Map = CMap
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/tests/test_mypy.py b/tests/test_mypy.py
new file mode 100644
index 0000000..fcdc11c
--- /dev/null
+++ b/tests/test_mypy.py
@@ -0,0 +1,26 @@
import os

# mypy is an optional test dependency.  When it (or its internal test
# harness) cannot be imported, the suite is skipped only if the caller
# opted in via IMMU_SKIP_MYPY_TESTS; otherwise the failure propagates.
# AssertionError is caught because mypy's test helpers can raise it on
# incompatible versions — TODO confirm against the pinned mypy release.
try:
    import mypy.test.testcmdline
    from mypy.test.helpers import normalize_error_messages
except (ImportError, AssertionError):
    if os.environ.get('IMMU_SKIP_MYPY_TESTS'):
        pass
    else:
        raise
else:
    # mypy emits extra "... defined here" notes when an unexpected keyword
    # argument is passed, and at no other time.  Strip them so the expected
    # output recorded in the .test data files stays stable.
    def renormalize_error_messages(messages):
        messages = [x for x in messages if not x.endswith(' defined here')]
        return normalize_error_messages(messages)

    # Monkey-patch mypy's cmdline test runner to use the stricter
    # normalizer above for every test case it executes.
    mypy.test.testcmdline.normalize_error_messages = renormalize_error_messages

    this_file_dir = os.path.dirname(os.path.realpath(__file__))
    test_data_prefix = os.path.join(this_file_dir, 'test-data')

    # Data-driven suite: mypy discovers and runs the cases defined in
    # tests/test-data/check-immu.test.
    class ImmuMypyTest(mypy.test.testcmdline.PythonCmdlineSuite):
        data_prefix = test_data_prefix
        files = ['check-immu.test']
diff --git a/tests/test_none_keys.py b/tests/test_none_keys.py
new file mode 100644
index 0000000..26d4220
--- /dev/null
+++ b/tests/test_none_keys.py
@@ -0,0 +1,515 @@
+import ctypes
+import unittest
+
+from immutables.map import map_hash, map_mask, Map as PyMap
+from immutables._testutils import HashKey
+
+
# Precompute hash values that collide with hash(None) at increasing trie
# depths.  The HAMT consumes the hash five bits at a time, so a key whose
# low 5*k bits match None's follows the same path for k levels.
none_hash = map_hash(None)
assert(none_hash != 1)
assert(none_hash.bit_length() <= 32)

# Unsigned view of the hash so the bitwise arithmetic below is well defined.
none_hash_u = ctypes.c_size_t(none_hash).value
# A 32-bit pattern that differs from none_hash in every bit.
not_collision = 0xffffffff & (~none_hash_u)

mask = 0x7ffffffff
# Low-bit prefixes of hash(None), one per trie level (7 levels of 5 bits).
none_collisions = [none_hash_u & (mask >> shift)
                   for shift in reversed(range(0, 32, 5))]
assert(len(none_collisions) == 7)
# Fill the remaining high bits from not_collision so each key diverges
# from None's path beyond its level; convert back to a signed hash value.
none_collisions = [
    ctypes.c_ssize_t(h | (not_collision & (mask << shift))).value
    for shift, h in zip(range(5, 37, 5), none_collisions)
]
+
+
class NoneCollision(HashKey):
    """A key whose hash collides with ``hash(None)`` up to a trie level.

    ``level`` indexes the precomputed ``none_collisions`` table; level -1
    selects the full collision (same hash as None at every level).
    """

    def __init__(self, name, level):
        # Wrapping None itself would defeat the __eq__ guard below.
        if name is None:
            raise ValueError("Can't have a NoneCollision with a None value")
        super().__init__(none_collisions[level], name)

    def __eq__(self, other):
        # Colliding hashes notwithstanding, the key is never equal to None.
        return False if other is None else super().__eq__(other)

    __hash__ = HashKey.__hash__
+
+
class BaseNoneTest:
    """Shared tests exercising None as a Map key, including collision paths.

    Subclasses provide ``Map`` (the pure-Python or C implementation).
    Fix applied in test_none_collision_2: the original contained a bare
    ``self.assertEqual`` — a no-op attribute access — where the assertion
    ``self.assertEqual(m[None], 1)`` was clearly intended.
    """

    Map = None

    def test_none_collisions(self):
        """Sanity-check the precomputed collision keys against map_mask."""
        collisions = [NoneCollision('a', level) for level in range(7)]
        indices = [map_mask(none_hash, shift) for shift in range(0, 32, 5)]

        # A level-i key shares None's trie index for the first i levels only.
        for i, c in enumerate(collisions[:-1], 1):
            self.assertNotEqual(c, None)
            c_hash = map_hash(c)
            self.assertNotEqual(c_hash, none_hash)
            for j, idx in enumerate(indices[:i]):
                self.assertEqual(map_mask(c_hash, j*5), idx)
            for j, idx in enumerate(indices[i:], i):
                self.assertNotEqual(map_mask(c_hash, j*5), idx)

        # The last key is a full collision: identical hash at every level.
        c = collisions[-1]
        self.assertNotEqual(c, None)
        c_hash = map_hash(c)
        self.assertEqual(c_hash, none_hash)
        for i, idx in enumerate(indices):
            self.assertEqual(map_mask(c_hash, i*5), idx)

    def test_none_as_key(self):
        """None works as a plain key: contains, getitem, repr, delete."""
        m = self.Map({None: 1})

        self.assertEqual(len(m), 1)
        self.assertTrue(None in m)
        self.assertEqual(m[None], 1)
        self.assertEqual(repr(m), 'immutables.Map({None: 1})')

        # Colliding keys that were never inserted must not appear present.
        for level in range(7):
            key = NoneCollision('a', level)
            self.assertFalse(key in m)
            with self.assertRaises(KeyError):
                m.delete(key)

        m = m.delete(None)
        self.assertEqual(len(m), 0)
        self.assertFalse(None in m)
        self.assertEqual(repr(m), 'immutables.Map({})')

        self.assertEqual(m, self.Map())

        with self.assertRaises(KeyError):
            m.delete(None)

    def test_none_set(self):
        """set/overwrite/delete with a None key on an initially empty map."""
        m = self.Map().set(None, 2)

        self.assertEqual(len(m), 1)
        self.assertTrue(None in m)
        self.assertEqual(m[None], 2)

        m = m.set(None, 1)

        self.assertEqual(len(m), 1)
        self.assertTrue(None in m)
        self.assertEqual(m[None], 1)

        m = m.delete(None)

        self.assertEqual(len(m), 0)
        self.assertEqual(m, self.Map())
        self.assertFalse(None in m)

        with self.assertRaises(KeyError):
            m.delete(None)

    def test_none_collision_1(self):
        """None plus one colliding key, deleted in both orders, per level."""
        for level in range(7):
            key = NoneCollision('a', level)
            m = self.Map({None: 1, key: 2})

            self.assertEqual(len(m), 2)
            self.assertTrue(None in m)
            self.assertEqual(m[None], 1)
            self.assertTrue(key in m)
            self.assertEqual(m[key], 2)

            # Delete None first, then the colliding key.
            m2 = m.delete(None)
            self.assertEqual(len(m2), 1)
            self.assertTrue(key in m2)
            self.assertEqual(m2[key], 2)
            self.assertFalse(None in m2)
            with self.assertRaises(KeyError):
                m2.delete(None)

            m3 = m2.delete(key)
            self.assertEqual(len(m3), 0)
            self.assertFalse(None in m3)
            self.assertFalse(key in m3)
            self.assertEqual(m3, self.Map())
            self.assertEqual(repr(m3), 'immutables.Map({})')
            with self.assertRaises(KeyError):
                m3.delete(None)
            with self.assertRaises(KeyError):
                m3.delete(key)

            # Now the other order: delete the colliding key first.
            m2 = m.delete(key)
            self.assertEqual(len(m2), 1)
            self.assertTrue(None in m2)
            self.assertEqual(m2[None], 1)
            self.assertFalse(key in m2)
            with self.assertRaises(KeyError):
                m2.delete(key)

            m4 = m2.delete(None)
            self.assertEqual(len(m4), 0)
            self.assertFalse(None in m4)
            self.assertFalse(key in m4)
            self.assertEqual(m4, self.Map())
            self.assertEqual(repr(m4), 'immutables.Map({})')
            with self.assertRaises(KeyError):
                m4.delete(None)
            with self.assertRaises(KeyError):
                m4.delete(key)

            self.assertEqual(m3, m4)

    def test_none_collision_2(self):
        """None alongside a non-colliding key and per-level colliding keys."""
        key = HashKey(not_collision, 'a')
        m = self.Map().set(None, 1).set(key, 2)

        self.assertEqual(len(m), 2)
        self.assertTrue(key in m)
        self.assertTrue(None in m)
        self.assertEqual(m[key], 2)
        # Fix: the original line was a bare ``self.assertEqual`` (no call).
        self.assertEqual(m[None], 1)

        m = m.set(None, 0)
        self.assertEqual(len(m), 2)
        self.assertTrue(key in m)
        self.assertTrue(None in m)

        for level in range(7):
            key2 = NoneCollision('b', level)
            self.assertFalse(key2 in m)
            m2 = m.set(key2, 1)

            self.assertEqual(len(m2), 3)
            self.assertTrue(key in m2)
            self.assertTrue(None in m2)
            self.assertTrue(key2 in m2)
            self.assertEqual(m2[key], 2)
            self.assertEqual(m2[None], 0)
            self.assertEqual(m2[key2], 1)

            m2 = m2.set(None, 1)
            self.assertEqual(len(m2), 3)
            self.assertTrue(key in m2)
            self.assertTrue(None in m2)
            self.assertTrue(key2 in m2)
            self.assertEqual(m2[key], 2)
            self.assertEqual(m2[None], 1)
            self.assertEqual(m2[key2], 1)

            m2 = m2.set(None, 2)
            self.assertEqual(len(m2), 3)
            self.assertTrue(key in m2)
            self.assertTrue(None in m2)
            self.assertTrue(key2 in m2)
            self.assertEqual(m2[key], 2)
            self.assertEqual(m2[None], 2)
            self.assertEqual(m2[key2], 1)

            # Deleting each of the three keys leaves the other two intact.
            m3 = m2.delete(key)
            self.assertEqual(len(m3), 2)
            self.assertTrue(None in m3)
            self.assertTrue(key2 in m3)
            self.assertFalse(key in m3)
            self.assertEqual(m3[None], 2)
            self.assertEqual(m3[key2], 1)
            with self.assertRaises(KeyError):
                m3.delete(key)

            m3 = m2.delete(key2)
            self.assertEqual(len(m3), 2)
            self.assertTrue(None in m3)
            self.assertTrue(key in m3)
            self.assertFalse(key2 in m3)
            self.assertEqual(m3[None], 2)
            self.assertEqual(m3[key], 2)
            with self.assertRaises(KeyError):
                m3.delete(key2)

            m3 = m2.delete(None)
            self.assertEqual(len(m3), 2)
            self.assertTrue(key in m3)
            self.assertTrue(key2 in m3)
            self.assertFalse(None in m3)
            self.assertEqual(m3[key], 2)
            self.assertEqual(m3[key2], 1)
            with self.assertRaises(KeyError):
                m3.delete(None)

        m2 = m.delete(None)
        self.assertEqual(len(m2), 1)
        self.assertFalse(None in m2)
        self.assertTrue(key in m2)
        self.assertEqual(m2[key], 2)
        with self.assertRaises(KeyError):
            m2.delete(None)

        m2 = m.delete(key)
        self.assertEqual(len(m2), 1)
        self.assertFalse(key in m2)
        self.assertTrue(None in m2)
        self.assertEqual(m2[None], 0)
        with self.assertRaises(KeyError):
            m2.delete(key)

    def test_none_collision_3(self):
        """Colliding key inserted first, None added and removed afterwards."""
        for level in range(7):
            key = NoneCollision('a', level)
            m = self.Map({key: 2})

            self.assertEqual(len(m), 1)
            self.assertFalse(None in m)
            self.assertTrue(key in m)
            self.assertEqual(m[key], 2)
            with self.assertRaises(KeyError):
                m.delete(None)

            m = m.set(None, 1)
            self.assertEqual(len(m), 2)
            self.assertTrue(key in m)
            self.assertEqual(m[key], 2)
            self.assertTrue(None in m)
            self.assertEqual(m[None], 1)

            m = m.set(None, 0)
            self.assertEqual(len(m), 2)
            self.assertTrue(key in m)
            self.assertEqual(m[key], 2)
            self.assertTrue(None in m)
            self.assertEqual(m[None], 0)

            m2 = m.delete(key)
            self.assertEqual(len(m2), 1)
            self.assertTrue(None in m2)
            self.assertEqual(m2[None], 0)
            self.assertFalse(key in m2)
            with self.assertRaises(KeyError):
                m2.delete(key)

            m2 = m.delete(None)
            self.assertEqual(len(m2), 1)
            self.assertTrue(key in m2)
            self.assertEqual(m2[key], 2)
            self.assertFalse(None in m2)
            with self.assertRaises(KeyError):
                m2.delete(None)

    def test_collision_4(self):
        """Two keys colliding with None at different levels, plus None."""
        key2 = NoneCollision('a', 2)
        key4 = NoneCollision('b', 4)
        m = self.Map({key2: 2, key4: 4})

        self.assertEqual(len(m), 2)
        self.assertTrue(key2 in m)
        self.assertTrue(key4 in m)
        self.assertEqual(m[key2], 2)
        self.assertEqual(m[key4], 4)
        self.assertFalse(None in m)

        m2 = m.set(None, 9)

        self.assertEqual(len(m2), 3)
        self.assertTrue(key2 in m2)
        self.assertTrue(key4 in m2)
        self.assertTrue(None in m2)
        self.assertEqual(m2[key2], 2)
        self.assertEqual(m2[key4], 4)
        self.assertEqual(m2[None], 9)

        # Overwriting any one key leaves the other two untouched.
        m3 = m2.set(None, 0)
        self.assertEqual(len(m3), 3)
        self.assertTrue(key2 in m3)
        self.assertTrue(key4 in m3)
        self.assertTrue(None in m3)
        self.assertEqual(m3[key2], 2)
        self.assertEqual(m3[key4], 4)
        self.assertEqual(m3[None], 0)

        m3 = m2.set(key2, 0)
        self.assertEqual(len(m3), 3)
        self.assertTrue(key2 in m3)
        self.assertTrue(key4 in m3)
        self.assertTrue(None in m3)
        self.assertEqual(m3[key2], 0)
        self.assertEqual(m3[key4], 4)
        self.assertEqual(m3[None], 9)

        m3 = m2.set(key4, 0)
        self.assertEqual(len(m3), 3)
        self.assertTrue(key2 in m3)
        self.assertTrue(key4 in m3)
        self.assertTrue(None in m3)
        self.assertEqual(m3[key2], 2)
        self.assertEqual(m3[key4], 0)
        self.assertEqual(m3[None], 9)

        # Deleting any one key leaves the other two untouched.
        m3 = m2.delete(None)
        self.assertEqual(m3, m)
        self.assertEqual(len(m3), 2)
        self.assertTrue(key2 in m3)
        self.assertTrue(key4 in m3)
        self.assertEqual(m3[key2], 2)
        self.assertEqual(m3[key4], 4)
        self.assertFalse(None in m3)
        with self.assertRaises(KeyError):
            m3.delete(None)

        m3 = m2.delete(key2)
        self.assertEqual(len(m3), 2)
        self.assertTrue(None in m3)
        self.assertTrue(key4 in m3)
        self.assertEqual(m3[None], 9)
        self.assertEqual(m3[key4], 4)
        self.assertFalse(key2 in m3)
        with self.assertRaises(KeyError):
            m3.delete(key2)

        m3 = m2.delete(key4)
        self.assertEqual(len(m3), 2)
        self.assertTrue(None in m3)
        self.assertTrue(key2 in m3)
        self.assertEqual(m3[None], 9)
        self.assertEqual(m3[key2], 2)
        self.assertFalse(key4 in m3)
        with self.assertRaises(KeyError):
            m3.delete(key4)

    def test_none_mutation(self):
        """Mutation API (set/del/finish) over None and colliding keys."""
        key2 = NoneCollision('a', 2)
        key4 = NoneCollision('b', 4)
        # level -1 selects the full collision with hash(None).
        key = NoneCollision('c', -1)
        m = self.Map({key: -1, key2: 2, key4: 4, None: 9})

        with m.mutate() as mm:
            self.assertEqual(len(mm), 4)
            self.assertTrue(key in mm)
            self.assertTrue(key2 in mm)
            self.assertTrue(key4 in mm)
            self.assertTrue(None in mm)
            self.assertEqual(mm[key2], 2)
            self.assertEqual(mm[key4], 4)
            self.assertEqual(mm[key], -1)
            self.assertEqual(mm[None], 9)

            # Negate every value in place.
            for k in m:
                mm[k] = -mm[k]

            self.assertEqual(len(mm), 4)
            self.assertTrue(key in mm)
            self.assertTrue(key2 in mm)
            self.assertTrue(key4 in mm)
            self.assertTrue(None in mm)
            self.assertEqual(mm[key2], -2)
            self.assertEqual(mm[key4], -4)
            self.assertEqual(mm[key], 1)
            self.assertEqual(mm[None], -9)

            # Delete and re-insert each key, checking the others survive.
            for k in m:
                del mm[k]
                self.assertEqual(len(mm), 3)
                self.assertFalse(k in mm)
                for n in m:
                    if n != k:
                        self.assertTrue(n in mm)
                        self.assertEqual(mm[n], -m[n])
                with self.assertRaises(KeyError):
                    del mm[k]
                mm[k] = -m[k]
                self.assertEqual(len(mm), 4)
                self.assertTrue(k in mm)
                self.assertEqual(mm[k], -m[k])

            # Negate twice more: back to -values overall.
            for k in m:
                mm[k] = -mm[k]

            self.assertEqual(len(mm), 4)
            self.assertTrue(key in mm)
            self.assertTrue(key2 in mm)
            self.assertTrue(key4 in mm)
            self.assertTrue(None in mm)
            self.assertEqual(mm[key2], 2)
            self.assertEqual(mm[key4], 4)
            self.assertEqual(mm[key], -1)
            self.assertEqual(mm[None], 9)

            for k in m:
                mm[k] = -mm[k]

            self.assertEqual(len(mm), 4)
            self.assertTrue(key in mm)
            self.assertTrue(key2 in mm)
            self.assertTrue(key4 in mm)
            self.assertTrue(None in mm)
            self.assertEqual(mm[key2], -2)
            self.assertEqual(mm[key4], -4)
            self.assertEqual(mm[key], 1)
            self.assertEqual(mm[None], -9)

            m2 = mm.finish()

        self.assertEqual(set(m), set(m2))
        self.assertEqual(len(m2), 4)
        self.assertTrue(key in m2)
        self.assertTrue(key2 in m2)
        self.assertTrue(key4 in m2)
        self.assertTrue(None in m2)
        self.assertEqual(m2[key2], -2)
        self.assertEqual(m2[key4], -4)
        self.assertEqual(m2[key], 1)
        self.assertEqual(m2[None], -9)

        for k, v in m.items():
            self.assertTrue(k in m2)
            self.assertEqual(m2[k], -v)

    def test_iterators(self):
        """keys/values/items views include None and all colliding keys."""
        key2 = NoneCollision('a', 2)
        key4 = NoneCollision('b', 4)
        key = NoneCollision('c', -1)
        m = self.Map({key: -1, key2: 2, key4: 4, None: 9})

        self.assertEqual(len(m), 4)
        self.assertTrue(key in m)
        self.assertTrue(key2 in m)
        self.assertTrue(key4 in m)
        self.assertTrue(None in m)
        self.assertEqual(m[key2], 2)
        self.assertEqual(m[key4], 4)
        self.assertEqual(m[key], -1)
        self.assertEqual(m[None], 9)

        s = set(m)
        self.assertEqual(len(s), 4)
        self.assertEqual(s, set([None, key, key2, key4]))

        sk = set(m.keys())
        self.assertEqual(s, sk)

        sv = set(m.values())
        self.assertEqual(len(sv), 4)
        self.assertEqual(sv, set([-1, 2, 4, 9]))

        si = set(m.items())
        self.assertEqual(len(si), 4)
        self.assertEqual(si,
                         set([(key, -1), (key2, 2), (key4, 4), (None, 9)]))

        d = {key: -1, key2: 2, key4: 4, None: 9}
        self.assertEqual(dict(m.items()), d)
+
+
class PyMapNoneTest(BaseNoneTest, unittest.TestCase):
    # Runs the shared None-key suite against the pure-Python Map.

    Map = PyMap
+
+
# The C-extension Map is optional; fall back to None so the C variant
# of the suite can be skipped rather than failing at import time.
try:
    from immutables._map import Map as CMap
except ImportError:
    CMap = None
+
+
# Runs the shared None-key suite against the C-extension Map,
# skipped entirely when the extension failed to import above.
@unittest.skipIf(CMap is None, 'C Map is not available')
class CMapNoneTest(BaseNoneTest, unittest.TestCase):

    Map = CMap
+
+
# Allow running this test module directly (python tests/test_none_keys.py).
if __name__ == "__main__":
    unittest.main()