├── .clang-format
├── .dir-locals.el
├── .gitignore
├── .gitmodules
├── CMakeLists.txt
├── COPYING
├── README.rst
├── examples
└── readme_example.py
├── micro-bench
├── setattr.py
├── tree_creation_and_teardown.py
├── tree_creation_and_teardown_no_arena.py
└── tree_creation_and_teardown_regular.py
├── quelling_blade
├── __init__.py
└── arena_allocatable.cc
└── setup.py
/.clang-format:
--------------------------------------------------------------------------------
1 | BasedOnStyle: llvm
2 |
3 | BreakBeforeBraces: Custom
4 | BraceWrapping:
5 | BeforeElse: true
6 | BeforeCatch: true
7 |
8 | AccessModifierOffset: -4
9 | AlignEscapedNewlines: Right
10 | AllowAllParametersOfDeclarationOnNextLine: false
11 | AllowShortBlocksOnASingleLine: false
12 | AllowShortFunctionsOnASingleLine: Empty
13 | AlwaysBreakTemplateDeclarations: true
14 | BinPackArguments: false
15 | BinPackParameters: false
16 | BreakConstructorInitializers: BeforeColon
17 | ColumnLimit: 90
18 | ConstructorInitializerAllOnOneLineOrOnePerLine: true
19 | IndentWidth: 4
20 | NamespaceIndentation: None
21 | PointerAlignment: Left
22 | SortIncludes: true
23 | SortUsingDeclarations: true
24 | SpacesBeforeTrailingComments: 2
25 | SpacesInSquareBrackets: false
26 | SpaceAfterCStyleCast: true
27 | SpaceAfterTemplateKeyword: false
28 |
29 | PenaltyBreakAssignment: 60
30 | PenaltyBreakBeforeFirstCallParameter: 175
31 |
--------------------------------------------------------------------------------
/.dir-locals.el:
--------------------------------------------------------------------------------
 1 | ((nil . ((eval . (add-to-list 'auto-mode-alist '("\\.h\\'" . c++-mode)))))
 2 |  (python-mode . ((fill-column . 79)))
 3 |  (c++-mode . ((c-basic-offset . 4)
 4 |               (fill-column . 90)
 5 |               (flycheck-gcc-language-standard . "gnu++17")
 6 |               (eval . (progn
 7 |                         (c-set-offset 'innamespace 0)
 8 |
 9 |                         (defun do-shell (s)
10 |                           ;; Helper for running a shell command and getting the first line
11 |                           ;; of its output.
12 |                           (substring (shell-command-to-string s) 0 -1))
13 |
     ;; NOTE(review): `project-root' is bound below but never used, and the
     ;; single-argument `append' is a no-op.  Presumably include paths rooted
     ;; at `project-root' (e.g. submodules/libpy) were meant to be appended
     ;; here -- confirm the original intent before "fixing" it.
14 |                         (setq flycheck-gcc-include-path
15 |                               (let* ((python-include
16 |                                       (do-shell "python -c \"import sysconfig; print(sysconfig.get_path('include'))\""))
17 |                                      (numpy-include
18 |                                       (do-shell "python -c \"import numpy; print(numpy.get_include())\""))
19 |                                      (project-root
20 |                                       (do-shell "git rev-parse --show-toplevel")))
21 |                                 (append
22 |                                  (list python-include numpy-include)))))))))
23 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.py[co]
2 | *.egg
3 | *.egg-info
4 | dist
5 | build
6 | eggs
7 | cover
8 | parts
9 | bin
10 | var
11 | sdist
12 | develop-eggs
13 | .installed.cfg
14 | *.o
15 | *.so
16 | .cache
17 | TAGS
18 | perf.data*
19 | *.annotation
20 | venv/*
21 | venv-dbg/*
22 | .gdb_history
23 |
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "submodules/libpy"]
2 | path = submodules/libpy
3 | url = git@github.com:llllllllll/libpy
4 | [submodule "submodules/abseil-cpp"]
5 | path = submodules/abseil-cpp
6 | url = git@github.com:abseil/abseil-cpp
7 |
--------------------------------------------------------------------------------
/CMakeLists.txt:
--------------------------------------------------------------------------------
# Build configuration for the quelling-blade CPython extension module.
cmake_minimum_required(VERSION 3.10)

project(quelling-blade LANGUAGES CXX)

set(CMAKE_POSITION_INDEPENDENT_CODE ON)
set(CMAKE_CXX_STANDARD 17)
# Fail instead of silently falling back to an older standard.
set(CMAKE_CXX_STANDARD_REQUIRED ON)

# Default to a Debug build when the user does not choose one explicitly.
if(NOT CMAKE_BUILD_TYPE)
    message(STATUS "No build type selected, default to Debug")
    set(CMAKE_BUILD_TYPE Debug)
endif()

# Per-configuration flags.  SANITIZE is a custom build type selected with
# -DCMAKE_BUILD_TYPE=Sanitize.  Note: -fmax-errors and
# -Waggressive-loop-optimizations (below) are GCC-specific.
set(CMAKE_CXX_FLAGS_DEBUG
    "-O0 -fmax-errors=5")

set(CMAKE_CXX_FLAGS_SANITIZE
    "${CMAKE_CXX_FLAGS_DEBUG} -fsanitize=address -fsanitize=undefined")

set(CMAKE_CXX_FLAGS_RELEASE
    "-O3 -fstack-protector-strong -flto -march=x86-64 -mtune=native -DNDEBUG")


# Find python interpreter and libraries.
find_package(PythonInterp 3.8 REQUIRED)
find_package(PythonLibs 3.8 REQUIRED)

# Portable threading target instead of linking a raw "pthread" string.
set(THREADS_PREFER_PTHREAD_FLAG ON)
find_package(Threads REQUIRED)

# Get the Python ABI suffix.  exec_program() has been deprecated since
# CMake 3.0; execute_process() is its replacement.
execute_process(
    COMMAND "${PYTHON_EXECUTABLE}" -c
            "import sysconfig; print(sysconfig.get_config_var('SOABI'))"
    OUTPUT_VARIABLE PYTHON_ABI_SUFFIX
    OUTPUT_STRIP_TRAILING_WHITESPACE
    RESULT_VARIABLE PYTHON_ABI_DETECTION_FAILED
)

if(PYTHON_ABI_DETECTION_FAILED)
    message(FATAL_ERROR "Failed to detect Python ABI suffix.")
endif()

# Add abseil targets.
add_subdirectory(submodules/abseil-cpp)

# Add target for our extension.
add_library(c_extension SHARED "quelling_blade/arena_allocatable.cc")

# Warning/debug flags are scoped to our own target so that -Werror does not
# leak into the abseil submodule (the previous global CMAKE_CXX_FLAGS did).
target_compile_options(
    c_extension PRIVATE
    -Werror
    -Wall -Wextra
    -Wsign-compare -Wsuggest-override
    -Wno-missing-field-initializers
    -g
    -Wparentheses -Waggressive-loop-optimizations
)

# NOTE: the original file set "LINKER_LANGUAGES C", but the real property is
# LINKER_LANGUAGE (singular), so it silently had no effect.  The default
# (CXX, inferred from the sources) is correct for this target, so no
# LINKER_LANGUAGE override is needed.
set_target_properties(
    c_extension
    PROPERTIES
    PREFIX ""
    OUTPUT_NAME "arena_allocatable.${PYTHON_ABI_SUFFIX}"
)

# SYSTEM suppresses warnings from the Python headers, which matters with
# -Werror; PRIVATE because nothing in this project consumes c_extension.
target_include_directories(
    c_extension SYSTEM PRIVATE
    ${PYTHON_INCLUDE_DIRS}
)

target_link_libraries(
    c_extension PRIVATE
    Threads::Threads
    ${PYTHON_LIBRARIES}
    absl::hash
    absl::flat_hash_map
)
--------------------------------------------------------------------------------
/COPYING:
--------------------------------------------------------------------------------
1 | GNU LESSER GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc.
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 |
9 | This version of the GNU Lesser General Public License incorporates
10 | the terms and conditions of version 3 of the GNU General Public
11 | License, supplemented by the additional permissions listed below.
12 |
13 | 0. Additional Definitions.
14 |
15 | As used herein, "this License" refers to version 3 of the GNU Lesser
16 | General Public License, and the "GNU GPL" refers to version 3 of the GNU
17 | General Public License.
18 |
19 | "The Library" refers to a covered work governed by this License,
20 | other than an Application or a Combined Work as defined below.
21 |
22 | An "Application" is any work that makes use of an interface provided
23 | by the Library, but which is not otherwise based on the Library.
24 | Defining a subclass of a class defined by the Library is deemed a mode
25 | of using an interface provided by the Library.
26 |
27 | A "Combined Work" is a work produced by combining or linking an
28 | Application with the Library. The particular version of the Library
29 | with which the Combined Work was made is also called the "Linked
30 | Version".
31 |
32 | The "Minimal Corresponding Source" for a Combined Work means the
33 | Corresponding Source for the Combined Work, excluding any source code
34 | for portions of the Combined Work that, considered in isolation, are
35 | based on the Application, and not on the Linked Version.
36 |
37 | The "Corresponding Application Code" for a Combined Work means the
38 | object code and/or source code for the Application, including any data
39 | and utility programs needed for reproducing the Combined Work from the
40 | Application, but excluding the System Libraries of the Combined Work.
41 |
42 | 1. Exception to Section 3 of the GNU GPL.
43 |
44 | You may convey a covered work under sections 3 and 4 of this License
45 | without being bound by section 3 of the GNU GPL.
46 |
47 | 2. Conveying Modified Versions.
48 |
49 | If you modify a copy of the Library, and, in your modifications, a
50 | facility refers to a function or data to be supplied by an Application
51 | that uses the facility (other than as an argument passed when the
52 | facility is invoked), then you may convey a copy of the modified
53 | version:
54 |
55 | a) under this License, provided that you make a good faith effort to
56 | ensure that, in the event an Application does not supply the
57 | function or data, the facility still operates, and performs
58 | whatever part of its purpose remains meaningful, or
59 |
60 | b) under the GNU GPL, with none of the additional permissions of
61 | this License applicable to that copy.
62 |
63 | 3. Object Code Incorporating Material from Library Header Files.
64 |
65 | The object code form of an Application may incorporate material from
66 | a header file that is part of the Library. You may convey such object
67 | code under terms of your choice, provided that, if the incorporated
68 | material is not limited to numerical parameters, data structure
69 | layouts and accessors, or small macros, inline functions and templates
70 | (ten or fewer lines in length), you do both of the following:
71 |
72 | a) Give prominent notice with each copy of the object code that the
73 | Library is used in it and that the Library and its use are
74 | covered by this License.
75 |
76 | b) Accompany the object code with a copy of the GNU GPL and this license
77 | document.
78 |
79 | 4. Combined Works.
80 |
81 | You may convey a Combined Work under terms of your choice that,
82 | taken together, effectively do not restrict modification of the
83 | portions of the Library contained in the Combined Work and reverse
84 | engineering for debugging such modifications, if you also do each of
85 | the following:
86 |
87 | a) Give prominent notice with each copy of the Combined Work that
88 | the Library is used in it and that the Library and its use are
89 | covered by this License.
90 |
91 | b) Accompany the Combined Work with a copy of the GNU GPL and this license
92 | document.
93 |
94 | c) For a Combined Work that displays copyright notices during
95 | execution, include the copyright notice for the Library among
96 | these notices, as well as a reference directing the user to the
97 | copies of the GNU GPL and this license document.
98 |
99 | d) Do one of the following:
100 |
101 | 0) Convey the Minimal Corresponding Source under the terms of this
102 | License, and the Corresponding Application Code in a form
103 | suitable for, and under terms that permit, the user to
104 | recombine or relink the Application with a modified version of
105 | the Linked Version to produce a modified Combined Work, in the
106 | manner specified by section 6 of the GNU GPL for conveying
107 | Corresponding Source.
108 |
109 | 1) Use a suitable shared library mechanism for linking with the
110 | Library. A suitable mechanism is one that (a) uses at run time
111 | a copy of the Library already present on the user's computer
112 | system, and (b) will operate properly with a modified version
113 | of the Library that is interface-compatible with the Linked
114 | Version.
115 |
116 | e) Provide Installation Information, but only if you would otherwise
117 | be required to provide such information under section 6 of the
118 | GNU GPL, and only to the extent that such information is
119 | necessary to install and execute a modified version of the
120 | Combined Work produced by recombining or relinking the
121 | Application with a modified version of the Linked Version. (If
122 | you use option 4d0, the Installation Information must accompany
123 | the Minimal Corresponding Source and Corresponding Application
124 | Code. If you use option 4d1, you must provide the Installation
125 | Information in the manner specified by section 6 of the GNU GPL
126 | for conveying Corresponding Source.)
127 |
128 | 5. Combined Libraries.
129 |
130 | You may place library facilities that are a work based on the
131 | Library side by side in a single library together with other library
132 | facilities that are not Applications and are not covered by this
133 | License, and convey such a combined library under terms of your
134 | choice, if you do both of the following:
135 |
136 | a) Accompany the combined library with a copy of the same work based
137 | on the Library, uncombined with any other library facilities,
138 | conveyed under the terms of this License.
139 |
140 | b) Give prominent notice with the combined library that part of it
141 | is a work based on the Library, and explaining where to find the
142 | accompanying uncombined form of the same work.
143 |
144 | 6. Revised Versions of the GNU Lesser General Public License.
145 |
146 | The Free Software Foundation may publish revised and/or new versions
147 | of the GNU Lesser General Public License from time to time. Such new
148 | versions will be similar in spirit to the present version, but may
149 | differ in detail to address new problems or concerns.
150 |
151 | Each version is given a distinguishing version number. If the
152 | Library as you received it specifies that a certain numbered version
153 | of the GNU Lesser General Public License "or any later version"
154 | applies to it, you have the option of following the terms and
155 | conditions either of that published version or of any later version
156 | published by the Free Software Foundation. If the Library as you
157 | received it does not specify a version number of the GNU Lesser
158 | General Public License, you may choose any version of the GNU Lesser
159 | General Public License ever published by the Free Software Foundation.
160 |
161 | If the Library as you received it specifies that a proxy can decide
162 | whether future versions of the GNU Lesser General Public License shall
163 | apply, that proxy's public statement of acceptance of any version is
164 | permanent authorization for you to choose that version for the
165 | Library.
166 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | ====================
2 | quelling blade (wip)
3 | ====================
4 |
5 | Arena allocatable user defined types in Python.
6 |
7 | The axe of a fallen gnome, it allows you to effectively maneuver the forest.
8 |
9 | What?
10 | =====
11 |
12 | CPython Implementation Details
13 | ------------------------------
14 |
15 | CPython allocates all objects on the heap and uses a reference counting garbage collector.
16 | When an object hits 0 references, it is deallocated immediately.
17 | CPython also has a mechanism for detecting cycles and freeing them.
18 |
 19 | Instances of types defined in Python (without ``__slots__``) are laid out in memory as:
20 |
21 | .. code-block:: c
22 |
23 | struct PyObject {
24 | uintptr_t _gc_next; /* support for the cycle detector */
25 | uintptr_t _gc_prev; /* support for the cycle detector */
26 | Py_ssize_t ob_refcnt; /* number of references to this object */
27 | PyTypeObject* ob_type; /* pointer to the Python type of this object */
28 | PyObject* dict; /* the ``__dict__`` for this object */
29 | PyWeakReference** weaklist; /* array of weak references to this object */
30 | };
31 |
32 |
33 | A PyObject takes a total of 48 bytes of memory.
34 | However, an empty ``dict`` in python takes up 64 bytes so an object requires a minimum of 112 bytes.
35 |
36 |
37 | Note: A ``PyObject*`` points to the ``ob_refcnt`` field, the ``_gc_*`` fields live *in front* of the object, but the memory is still being used.
38 |
39 |
40 | Potential Issues Caused By This Model
41 | -------------------------------------
42 |
43 | Lots of Heap Allocations
44 | ~~~~~~~~~~~~~~~~~~~~~~~~
45 |
46 | A consequence of allocating all objects on the heap is that objects that are related are not close to each other in memory, increasing the amount of main memory reads that must happen.
47 | The object's ``__dict__`` is a pointer to the dictionary instead of being embedded in the object directly.
48 | Because the ``__dict__`` is a pointer, accessing any attribute of an object will require a second pointer dereference to the dictionary which is in an arbitrary location in the heap [1]_.
49 | The Python ``dict`` object itself has an internal heap allocation to store the contents of the dictionary which must also be accessed.
50 | This adds a lot of overhead on each attribute access.
51 | Heap fragmentation is hard to observe in micro benchmarks because accessing the same attribute from the same object in a loop will force all of the relevant objects to stay in the processor's cache and the dereferences will appear very fast.
52 | Real programs do not usually access objects in this way, and instead access an attribute only once or twice before moving on to another object which is not in the cache.
53 |
54 | Slow Destruction
55 | ~~~~~~~~~~~~~~~~
56 |
57 | A consequence of a reference counted garbage collector is that it makes deallocation of graphs expensive.
58 | In this context, a graph is any Python object which contains more Python objects.
59 | Once the root node reaches zero references, the graph must be unwound by decrementing the reference count of each of its children.
60 | If the node is the sole owner of any of its children, then that node must unwind itself by decrementing the references of its own children.
61 | This can make deallocation of the graph linear in the number of nodes.
62 |
63 | Another place where this garbage collection scheme can be an issue is when latency is more important than keeping the memory usage low.
64 | An example of this is a webserver which takes in some input, does a small amount of processing, and returns some data to the user.
65 | For each request, the programmer may know that the memory cannot exceed some reasonable bound and therefore it would be safe to keep all of the temporary objects alive until the request has been served.
66 | Instead, CPython will spend time freeing each intermediate object along the way.
67 |
68 | Arena Allocation
69 | ----------------
70 |
71 | Arena allocation works by creating distinct regions, called arenas, to provide storage for a collection of objects.
72 | The arena is not released until all of the objects in the arena are done being used.
73 | Arenas can guarantee that data that is likely to be used together is physically close together.
74 | Having related objects close to each other in memory is better for the CPU memory cache and reduces the number of trips to main memory.
75 |
76 | Another advantage of arena allocation is that the entire arena may be released at once.
77 | If the data structures being used inside the arena are designed correctly, each individual destructor need not be called.
 78 | This transforms an ``O(n)`` deallocation into an ``O(1)`` deallocation.
79 |
80 | API and Usage
81 | =============
82 |
83 | Quelling blade provides two types: ``ArenaAllocatable`` and ``Arena``.
84 |
85 | ``ArenaAllocatable``
86 | --------------------
87 |
88 | ``ArenaAllocatable`` is a type which can be used instead of ``object`` as a base class to create new types whose instances may be allocated in an arena.
89 | ``ArenaAllocatable`` subclasses behave like normal Python types with the following restrictions:
90 |
91 | - cannot use ``__slots__``
92 | - cannot access the ``__dict__`` directly (through ``ob.__dict__`` or ``vars(ob)``).
93 |
94 | ``Arena``
95 | ---------
96 |
97 | The ``Arena`` type is meant to be used as a context manager which manages the scope of an arena.
98 | The constructor for ``Arena`` takes either a single subclass of ``ArenaAllocatable`` or a list of subclasses of ``ArenaAllocatable``.
99 | Inside the ``Arena`` context, all new instances of any of the provided types (or subclasses of any of the provided types) will be allocated inside the same arena.
100 | None of the objects will be deallocated until the later of:
101 |
102 | 1. The arena context closes (or the arena object is deallocated)
103 | 2. None of the objects in the arena are available to Python anymore.
104 |
105 | Escaped Instances
106 | -----------------
107 |
108 | Extension modules should never allow a Python programmer to crash the program or otherwise violate the memory safety of Python.
109 | Normal C++ arena allocators would not go out of their way to detect objects escaping the arena.
110 | Instead the documentation would advise programmers on how to use the tool safely.
111 | Python programmers are not used to dealing with the details of object lifetimes while programming in Python.
112 | Therefore, quelling blade must ensure that the state of the program is valid when objects escape an arena context.
113 |
114 | When an object lives past the end of the ``Arena`` context manager where it was created, the object becomes an "owner" of its own arena.
115 | None of the objects in the arena can be deallocated until there are no more escaped references.
116 | None of the attributes of any ``ArenaAllocatable`` object will be released until the entire arena can be safely destroyed.
117 | When the last escaped reference is released, the entire arena will be torn down at once, freeing all memory and releasing all attributes.
118 |
119 | When quelling blade detects that some objects have escaped, a ``RuntimeWarning`` will be issued with the number of escaped references.
120 | At this point, the programmer can attempt to debug their program to find where the objects are escaping to Python.
121 |
122 | Example Usage
123 | -------------
124 |
125 | In the following example, a binary tree class named ``Node`` is defined.
126 | ``Node`` is a subclass of ``quelling_blade.ArenaAllocatable``.
127 | The ``Node`` type holds three attributes: a value, a left child, and a right child.
128 | The value may be any type of Python object.
129 | The left and right children may be either ``Node`` objects or ``None``.
130 | The ``do_work`` function creates a tree and then sorts the nodes to be used as a binary search tree.
131 | This workload creates nodes, accesses attributes from them, and then creates new nodes.
132 | This is meant to simulate a real work load that uses trees.
133 |
134 | .. code-block:: python
135 |
136 | import quelling_blade as qb
137 |
138 |
139 | class Node(qb.ArenaAllocatable):
140 | """A simple binary tree node.
141 |
142 | Parameters
143 | ----------
144 | value : any
145 | The value of the node.
146 | left : Node or None, optional
147 | The left side of the tree.
148 | right : Node or None, optional
149 | The right side of the tree.
150 | """
151 | def __init__(self, value, left=None, right=None):
152 | self.value = value
153 | self.left = left
154 | self.right = right
155 |
156 | def pretty(self, level=0):
157 | t_indent = ' ' * level
158 | v_indent = ' ' * (level + 1)
159 | if self.left is None:
160 | left = f'{v_indent}None'
161 | else:
162 | left = self.left.pretty(level + 1)
163 | if self.right is None:
164 | right = f'{v_indent}None'
165 | else:
166 | right = self.right.pretty(level + 1)
167 |
168 | return (
169 | f'{t_indent}{type(self).__name__}(\n'
170 | f'{v_indent}{self.value!r},\n'
171 | f'{left},\n'
172 | f'{right},\n'
173 | f'{t_indent})'
174 | )
175 |
176 | def pprint(self):
177 | print(self.pretty())
178 |
179 | def __iter__(self):
180 | yield self
181 | if self.left is not None:
182 | yield from self.left
183 | if self.right is not None:
184 | yield from self.right
185 |
186 |
187 | def create_tree():
188 | """Create a binary tree with letters.
189 | """
190 | return Node(
191 | 'a',
192 | Node(
193 | 'b',
194 | Node(
195 | 'c',
196 | Node('d'),
197 | Node('e'),
198 | ),
199 | Node(
200 | 'f',
201 | Node('g'),
202 | Node('h'),
203 | ),
204 | ),
205 | Node(
206 | 'i',
207 | Node(
208 | 'j',
209 | Node('k'),
210 | Node('l'),
211 | ),
212 | Node(
213 | 'm',
214 | Node('n'),
215 | Node('o'),
216 | ),
217 | ),
218 | )
219 |
220 |
221 | def _sort_rec(vals):
222 | if len(vals) == 0:
223 | return None
224 | if len(vals) == 1:
225 | return Node(vals[0])
226 |
227 | pivot = len(vals) // 2
228 | return Node(
229 | vals[pivot],
230 | _sort_rec(vals[:pivot]),
231 | _sort_rec(vals[pivot + 1:]),
232 | )
233 |
234 |
235 | def sort(tree):
236 | """Sort a tree.
237 | """
238 | return _sort_rec(sorted((n.value for n in tree)))
239 |
240 |
241 | def do_work(msg, ret=False):
242 | """A function which creates a tree and processes it.
243 | """
244 | print(msg)
245 |
246 | # allocate some objects
247 | tree = create_tree()
248 |
249 | # process the objects and allocate some more
250 | sorted_tree = sort(tree)
251 |
252 | # Both ``tree`` and ``sorted_tree`` fall out of scope here. This should
253 | # recursively destroy all of the nodes created.
254 |
255 | if ret:
256 | return sorted_tree
257 | return None
258 |
259 |
260 | # do work like normal, objects are allocated wherever and there is nothing
261 | # special about how ``Node`` objects are allocated in ``do_work``.
262 | do_work('global scope')
263 |
264 |
265 | with qb.Arena(Node):
266 | # In this context, all ``Node`` instances, and instances of subclasses
267 | # of ``Node``, are allocated in a shared arena.
268 |
269 | # Do work in an arena. This means that the ``Node`` objects in ``do_work``
270 | # are allocated in the same arena, which means that all the nodes will be
271 | # laid out in a set of contiguous buffers. When ``tree`` and
272 | # ``sorted_tree`` fall out of scope, the objects in the arena will be
273 | # marked as "dead", but no memory is deallocated.
274 | do_work('in context')
275 | # When the ``qb.Arena`` context is exited, check to make sure all the
276 | # objects are dead. If any objects are alive, make them an owner of the
277 | # entire arena and issue a ``RuntimeWarning``. Until the escaped objects
278 | # are deallocated, the entire arena will stay alive. If the context is used
279 | # correctly, all the objects will be dead already so the storage can
280 | # be released in one shot.
281 |
282 |
283 | with qb.Arena(Node):
284 | # Bind the result of ``do_work`` to a variable that will outlive the
285 | # ``qb.Arena`` context. Memory cannot be freed when the context is exited
286 | # because that would invalidate the ``escaped`` object. Instead, warn the
287 | # user that an object has escaped and make the object own *all* of the
288 | # memory. This means that none of the objects in the arena will be released
289 | # until ``escaped`` is destroyed.
290 | escaped = do_work('escape context', ret=True)
291 |
292 |
293 | produces:
294 |
295 | .. code-block::
296 |
297 | global scope
298 | in context
299 | escape context
300 | examples/readme_example.py:152: RuntimeWarning: 1 object is still alive at arena exit
301 | escaped = do_work('escape context', ret=True)
302 |
303 | Design
304 | ======
305 |
306 | Quelling blade aims to make allocation, reads, writes, and destruction of objects faster than default Python objects.
307 |
308 | ``ArenaAllocatableMeta``
309 | ------------------------
310 |
311 | Quelling blade uses a metaclass for types that subclass ``ArenaAllocatable``.
312 | The metaclass is needed to store C++ data on the class objects themselves.
313 | Each ``ArenaAllocatable`` type (instances of ``ArenaAllocatableMeta``) contains a regular Python type object's fields with the addition of a stack of C++ arenas.
314 | The stack initially begins empty, meaning instances should be allocated globally and freed when their reference count hits zero.
315 | The arena stack is implemented as a ``std::vector<std::shared_ptr<arena>>``.
316 | A shared pointer is used to implement reference counted lifetime for the arena.
317 | The reference counting on the C++ arena will be discussed more when describing ``ArenaAllocatable`` instances.
318 |
319 | To allocate a new ``ArenaAllocatable`` instance, the arena stack must be checked.
320 | If the stack is empty, instances will be allocated globally and have normal Python object lifetime rules.
321 | If the stack is non-empty, the instance will be allocated in the arena on the top of the stack [2]_.
322 |
323 | ``Arena``
324 | ---------
325 |
326 | Slabs
327 | ~~~~~
328 |
329 | An *arena* is a collection of one or more fixed-size allocations.
330 | Each fixed-size allocation in the arena is called a *slab*.
331 | Each *slab* in an arena has the same capacity.
332 | An arena may grow to contain an arbitrary number of slabs, but the number of slabs will never decrease.
333 | The last slab added to the arena is known as the *active slab*.
334 | Each slab contains a size which indicates how many bytes have been allocated out of the slab.
335 |
336 | To allocate a new object in an arena:
337 |
338 | - If the allocation size is greater than the arena's slab capacity: fail.
339 | - If there is room, increment the size of the slab by the number of bytes requested plus any alignment padding bytes.
340 | - If there is not room in the active slab, create a slab and mark it as the active slab.
341 | Increment the size of the new active slab by the number of bytes requested plus any alignment padding bytes.
342 |
343 | External Objects
344 | ~~~~~~~~~~~~~~~~
345 |
346 | In addition to slabs, each arena contains a multiset of Python object references called the *external references*.
347 | The entries in the external references multiset are pointers to objects that are owned by the objects that are allocated in the arena.
348 | For example: if there is a Python object allocated in the arena with two attributes
349 | ``a = 'attr'`` and ``b = None``, then there will be four entries in the external references:
350 |
351 | - ``'attr'``
352 | - ``None``
353 | - ``'a'`` (attribute name)
354 | - ``'b'`` (attribute name)
355 |
356 | The attributes are not stored as Python objects because Python already requires that attribute names be ``str`` objects.
357 |
358 | When the entire arena is destroyed, each reference in the external references will be released.
359 |
360 | The memory for this multiset is allocated out of the arena itself so that all of the operations on objects in the arena stay within the arena.
361 |
362 | Arena Stack
363 | ~~~~~~~~~~~
364 |
365 | When the ``Arena`` Python context manager is entered, a new C++ arena is allocated behind a ``std::shared_ptr``.
366 | For each type that is going to participate in this arena, the new C++ arena is pushed onto the type's arena stack.
367 | The full set of types is not just the explicitly referenced types, but also all of the subclasses of these types.
368 |
369 | When the context is exited, the top entry is popped from each type's arena stack [2]_.
370 | This may not free the underlying C++ arena yet.
371 | The C++ arena is allocated behind a reference counted pointer, and there may still be references that exist at this point.
372 | If there are more references to the arena when the context is closed, it means that instances have escaped the arena.
373 |
374 | ``ArenaAllocatable``
375 | --------------------
376 |
377 | Arena allocatable instances are laid out in memory differently from regular Python objects.
378 | Arena allocatable instances are laid out in memory like:
379 |
380 | .. code-block:: c++
381 |
382 | // Quelling Blade object
383 | struct arena_allocatable {
384 | Py_ssize_t ob_refcnt;
385 | PyTypeObject* ob_type;
386 |         std::shared_ptr<arena> owning_arena;
387 |         absl::flat_hash_map<PyObject*, PyObject*> dict;  /* embedded attribute dict */
388 | };
389 |
390 | // Original PyObject
391 | struct PyObject {
392 | uintptr_t _gc_next; /* support for the cycle detector */
393 | uintptr_t _gc_prev; /* support for the cycle detector */
394 | Py_ssize_t ob_refcnt; /* number of references to this object */
395 | PyTypeObject* ob_type; /* pointer to the Python type of this object */
396 | PyObject* dict; /* the ``__dict__`` for this object */
397 | PyWeakReference** weaklist; /* array of weak references to this object */
398 | };
399 |
400 | Like regular Python objects, they contain a pointer to their Python type object and a reference count.
401 | Unlike regular Python objects, the attributes are not stored in an out-of-band Python dictionary.
402 | Instead, ``ArenaAllocatable`` objects embed a C++ dictionary in the same allocation as the object itself.
403 | This reduces the number of dereferences required to find an attribute.
404 |
405 | Detecting Escaped Objects
406 | ~~~~~~~~~~~~~~~~~~~~~~~~~
407 |
408 | ``ArenaAllocatable`` instances use the ``ob_refcnt`` field slightly differently from regular Python objects.
409 | Instead of representing the total number of references, it represents only the references that are not owned by objects in the arena.
410 | When an object has a non-zero reference count, meaning it has escaped the arena, the ``owning_arena`` field is set to be an owning reference to the arena in which the object was allocated.
411 | When an ``ArenaAllocatable`` object is stored as an attribute of another ``ArenaAllocatable`` object which was allocated from the same arena, the reference count is *not* incremented.
412 | ``ArenaAllocatable.tp_dealloc``, the function called when an object's reference count reaches 0, is a nop when the instance was allocated in an arena.
413 | ``ArenaAllocatable.tp_dealloc`` will leave the object in a usable state and all external references are preserved.
414 |
415 | The following methods have extra functionality to support this arena lifetime management and escape detection:
416 |
417 | ``tp_new``
418 | ``````````
419 |
420 | When a new instance is allocated, the ``owning_arena`` is set to be an owning reference to the arena the object was allocated in.
421 | If the object is being allocated globally, this is set to ``nullptr``.
422 | New instances start with a reference count of 1 because they begin in an "escaped" state.
423 |
424 | ``tp_setter``
425 | `````````````
426 |
427 | If the object being stored on the arena is also allocated within the same arena, the reference count is not incremented.
428 |
429 | ``tp_getattr``
430 | ``````````````
431 |
432 | If the attribute being returned has a reference count of 0, we assert that it was allocated in the same arena as ``self``.
433 | After the assertion, we set the ``owning_arena`` field to a new owning reference to the owning arena.
434 | Then, the reference count is incremented back to 1 and the object is returned to Python.
435 |
436 | ``tp_dealloc``
437 | ``````````````
438 |
439 | If the object was allocated in an arena, reset the ``owning_arena`` pointer to drop a reference to the arena.
440 |
441 |
442 | To Do
443 | =====
444 |
445 | - ENH: support weakrefs
446 | - ENH: make ``Arena`` allocator stack thread or context local
447 | - BUG: implement ``tp_traverse`` on the ``qb.Arena`` object
448 | - BUG: implement ``tp_traverse`` for escaped arena allocatable instances
449 | - BUG: fix arena context teardown in non-stack order, e.g.: enter a, enter b, exit a, exit b.
450 | See [2]_.
451 |
452 |
453 | Notes
454 | =====
455 |
456 | .. [1] Actually, two more pointers must be dereferenced to do an attribute lookup.
457 | When an attribute is looked up, first the ``ob_type``\'s ``__dict__`` is checked to see if there is an object that implements both ``tp_descr_get`` and ``tp_descr_set`` with the name being looked up.
458 | If so, that object's ``tp_descr_get`` is called to return the attribute.
459 | This is to support the descriptor protocol.
460 |
461 | .. [2] This is currently a bug.
462 | The ``Arena`` object should hold onto the smart pointer and remove it from the vector by searching from the right.
463 | This provides more reasonable semantics for:
464 |
465 | - enter arena A
466 | - enter arena B
467 | - close arena A
468 | - close arena B
469 |
470 | Currently, the closing of any ``Arena`` context just closes the most recently opened context.
471 | Instead, it should close the same arena that it opened.
472 |
--------------------------------------------------------------------------------
/examples/readme_example.py:
--------------------------------------------------------------------------------
1 | import quelling_blade as qb
2 |
3 |
class Node(qb.ArenaAllocatable):
    """A binary tree node that may be allocated inside a ``qb`` arena.

    Parameters
    ----------
    value : any
        The payload stored at this node.
    left : Node or None, optional
        Root of the left subtree.
    right : Node or None, optional
        Root of the right subtree.
    """
    def __init__(self, value, left=None, right=None):
        self.value = value
        self.left = left
        self.right = right

    def pretty(self, level=0):
        """Render this subtree as an indented, multi-line string."""
        outer = ' ' * level
        inner = ' ' * (level + 1)
        left = f'{inner}None' if self.left is None else self.left.pretty(level + 1)
        right = f'{inner}None' if self.right is None else self.right.pretty(level + 1)

        return (
            f'{outer}{type(self).__name__}(\n'
            f'{inner}{self.value!r},\n'
            f'{left},\n'
            f'{right},\n'
            f'{outer})'
        )

    def pprint(self):
        """Print the rendering produced by :meth:`pretty`."""
        print(self.pretty())

    def __iter__(self):
        """Yield this node, then every node of the left and right subtrees."""
        yield self
        for child in (self.left, self.right):
            if child is not None:
                yield from child
50 |
51 |
def create_tree():
    """Build the fixed example tree holding the letters ``'a'`` to ``'o'``."""
    left = Node(
        'b',
        Node('c', Node('d'), Node('e')),
        Node('f', Node('g'), Node('h')),
    )
    right = Node(
        'i',
        Node('j', Node('k'), Node('l')),
        Node('m', Node('n'), Node('o')),
    )
    return Node('a', left, right)
84 |
85 |
def _sort_rec(vals):
    # Build a balanced tree from an already-sorted sequence: the middle
    # element becomes the root, each half becomes a subtree.
    count = len(vals)
    if count == 0:
        return None
    if count == 1:
        return Node(vals[0])

    middle = count // 2
    return Node(
        vals[middle],
        _sort_rec(vals[:middle]),
        _sort_rec(vals[middle + 1:]),
    )
98 |
99 |
def sort(tree):
    """Return a balanced tree containing the sorted values of ``tree``."""
    values = sorted(node.value for node in tree)
    return _sort_rec(values)
104 |
105 |
def do_work(msg, ret=False):
    """Create a tree, sort it, and optionally return the sorted tree.

    Parameters
    ----------
    msg : str
        Message printed before any work is done.
    ret : bool, optional
        When true the sorted tree is returned (and thus outlives the call);
        otherwise ``None`` is returned.
    """
    print(msg)

    # Allocate some objects, then process them (which allocates more).
    tree = create_tree()
    sorted_tree = sort(tree)

    # Unless returned, both ``tree`` and ``sorted_tree`` fall out of scope
    # here, which should recursively destroy all of the nodes created.
    return sorted_tree if ret else None
123 |
124 |
125 | # do work like normal, objects are allocated wherever and there is nothing
126 | # special about how ``Node`` objects are allocated in ``do_work``.
127 | do_work('global scope')
128 |
129 |
130 | with qb.Arena(Node):
131 | # In this context, all ``Node`` instances, and instances of subclasses
132 | # of ``Node``, are allocated in a shared arena.
133 |
134 | # Do work in an arena. This means that the ``Node`` objects in ``do_work``
135 | # are allocated in the same arena, which means that all the nodes will be
136 | # laid out in a set of contiguous buffers. When ``tree`` and
137 | # ``sorted_tree`` fall out of scope, the objects in the arena will be
138 | # marked as "dead", but no memory is deallocated.
139 | do_work('in context')
140 | # When we exit the ``qb.Arena`` context we check to make sure all the
141 | # objects are dead. If any objects are alive, we make them an owner of the
142 | # entire arena and throw a ``PerformanceWarning``. Until the escaped objects
143 | # are deallocated, the entire arena will stay alive. If we have used the
144 | # context correctly, all the objects will be dead already so we can release
145 | # the storage in one shot.
146 |
147 |
148 | with qb.Arena(Node):
149 | # Bind the result of ``do_work`` to a variable that will outlive the
150 | # ``qb.Arena`` context. Memory cannot be freed when we exit the context
151 | # because that would invalidate the ``escaped`` object. Instead, warn the
152 | # user that an object has escaped and make the object own *all* of the
153 | # memory. This means that none of the objects in the arena will be released
154 | # until ``escaped`` is destroyed.
155 | escaped = do_work('escape context', ret=True)
156 |
--------------------------------------------------------------------------------
/micro-bench/setattr.py:
--------------------------------------------------------------------------------
1 | from quelling_blade.arena_allocatable import ArenaAllocatable
2 |
# Keep one large arena (2 ** 28-byte slab) alive for the whole run so every
# allocation below lands inside it.
SLAB_BYTES = 2 ** 28
N_ITERATIONS = 60000000

arena = ArenaAllocatable.arena(SLAB_BYTES)
ob = ArenaAllocatable()

# Benchmark: repeatedly rebind one attribute on a single instance.
for _ in range(N_ITERATIONS):
    ob.a = 1
8 |
--------------------------------------------------------------------------------
/micro-bench/tree_creation_and_teardown.py:
--------------------------------------------------------------------------------
1 | from quelling_blade.arena_allocatable import ArenaAllocatable, Arena
2 |
3 |
def f(n_trees=10000, depth=20000, slab_size=2 ** 32):
    """Benchmark building and tearing down linked chains inside an arena.

    Improvement over the original: the hard-coded loop counts and slab size
    are now parameters with the original values as defaults, so the benchmark
    can be scaled without editing the source. Calling ``f()`` is unchanged.

    Parameters
    ----------
    n_trees : int, optional
        Number of arena contexts (chains) to create and destroy.
    depth : int, optional
        Number of nodes linked under each root.
    slab_size : int, optional
        Slab size in bytes handed to ``Arena``.
    """
    # NOTE(review): indentation was lost in the dump this was recovered from;
    # the trailing ``del`` statements are assumed to sit after the inner loop.
    for _ in range(n_trees):
        with Arena(ArenaAllocatable, slab_size):
            root = ob = ArenaAllocatable()
            for _ in range(depth):
                new = ArenaAllocatable()
                ob.a = new
                ob = new
            del new
            del ob
            del root  # actually release the tree
15 |
16 |
17 | f()  # run the benchmark when this script is executed
18 |
--------------------------------------------------------------------------------
/micro-bench/tree_creation_and_teardown_no_arena.py:
--------------------------------------------------------------------------------
1 | from quelling_blade.arena_allocatable import ArenaAllocatable
2 |
3 |
def f(n_trees=10000, depth=20000):
    """Benchmark linked-chain creation/teardown with ``ArenaAllocatable``
    instances allocated globally (no arena context).

    Improvement over the original: the hard-coded loop counts are now
    parameters with the original values as defaults, so the benchmark can be
    scaled without editing the source. Calling ``f()`` is unchanged.

    Parameters
    ----------
    n_trees : int, optional
        Number of chains to create and destroy.
    depth : int, optional
        Number of nodes linked under each root.
    """
    # NOTE(review): indentation was lost in the dump this was recovered from;
    # the trailing ``del`` statements are assumed to sit after the inner loop.
    for _ in range(n_trees):
        root = ob = ArenaAllocatable()
        for _ in range(depth):
            new = ArenaAllocatable()
            ob.a = new
            ob = new
        del new
        del ob
        del root  # actually release the tree
15 |
16 | f()  # run the benchmark when this script is executed
17 |
--------------------------------------------------------------------------------
/micro-bench/tree_creation_and_teardown_regular.py:
--------------------------------------------------------------------------------
class Node:
    """Plain Python object used as the non-arena baseline; attributes live
    in the ordinary instance ``__dict__``."""
3 |
4 |
def f(n_trees=10000, depth=20000):
    """Benchmark linked-chain creation/teardown with regular Python objects.

    Improvement over the original: the hard-coded loop counts are now
    parameters with the original values as defaults, so the benchmark can be
    scaled without editing the source. Calling ``f()`` is unchanged.

    Parameters
    ----------
    n_trees : int, optional
        Number of chains to create and destroy.
    depth : int, optional
        Number of nodes linked under each root.
    """
    # NOTE(review): indentation was lost in the dump this was recovered from;
    # the trailing ``del`` statements are assumed to sit after the inner loop.
    for _ in range(n_trees):
        root = ob = Node()
        for _ in range(depth):
            new = Node()
            ob.a = new
            ob = new
        del new
        del ob
        del root  # actually release the tree
16 |
17 | f()  # run the benchmark when this script is executed
18 |
--------------------------------------------------------------------------------
/quelling_blade/__init__.py:
--------------------------------------------------------------------------------
1 | from .arena_allocatable import *
2 |
--------------------------------------------------------------------------------
/quelling_blade/arena_allocatable.cc:
--------------------------------------------------------------------------------
1 | #include
2 | #include
3 | #include
4 | #include
5 | #include
6 | #include
7 |
8 | #include
9 | #include
10 |
11 | namespace qb {
12 | template
13 | class owned_ref;
14 |
15 | /** A type that explicitly indicates that a Python object is a borrowed
16 | reference. This is implicitly convertible from a regular `PyObject*` or a
17 | `owned_ref`. This type may be used as a Python object parameter like:
18 |
19 | \code
20 | int f(borrowed_ref a, borrowed_ref b);
21 | \endcode
22 |
23 | This allows calling this function with either `owned_ref` or
24 | `PyObject*`.
25 |
26 | @note A `borrowed_ref` may still hold a value of `nullptr`.
27 | */
// NOTE(review): angle-bracketed template argument lists (e.g. `template
// <typename T = PyObject>`) appear to have been stripped by the tool that
// produced this dump; the code below is preserved byte-for-byte as found.
28 | template
29 | class borrowed_ref {
30 | private:
31 | T* m_ref;
32 |
33 | public:
// A borrowed_ref never increfs/decrefs: it does not own the reference.
34 | constexpr borrowed_ref() : m_ref(nullptr) {}
35 | constexpr borrowed_ref(std::nullptr_t) : m_ref(nullptr) {}
36 | constexpr borrowed_ref(T* ref) : m_ref(ref) {}
37 | constexpr borrowed_ref(const owned_ref& ref) : m_ref(ref.get()) {}
38 |
39 | constexpr T* get() const {
40 | return m_ref;
41 | }
42 |
43 | explicit constexpr operator T*() const {
44 | return m_ref;
45 | }
46 |
47 | // use an enable_if to resolve the ambiguous dispatch when T is PyObject
48 | template::value>>
50 | explicit operator PyObject*() const {
51 | return reinterpret_cast(m_ref);
52 | }
53 |
54 | T& operator*() const {
55 | return *m_ref;
56 | }
57 |
58 | T* operator->() const {
59 | return m_ref;
60 | }
61 |
62 | explicit operator bool() const {
63 | return m_ref;
64 | }
65 |
// Pointer-identity comparison only; this does not invoke Python ``__eq__``.
66 | bool operator==(borrowed_ref<> other) const {
67 | return m_ref == other.get();
68 | }
69 |
70 | bool operator!=(borrowed_ref<> other) const {
71 | return m_ref != other.get();
72 | }
73 | };
74 |
75 | /** An RAII wrapper for ensuring an object is cleaned up in a given scope.
76 | */
// Owns one strong Python reference; the destructor drops it (Py_XDECREF).
77 | template
78 | class owned_ref {
79 | private:
80 | T* m_ref;
81 |
82 | public:
83 | /** The type of the underlying pointer.
84 | */
85 | using element_type = T;
86 |
87 | /** Default construct a scoped ref to a `nullptr`.
88 | */
89 | constexpr owned_ref() : m_ref(nullptr) {}
90 |
91 | constexpr owned_ref(std::nullptr_t) : m_ref(nullptr) {}
92 |
93 | /** Manage a new reference. `ref` should not be used outside of the
94 | `owned_ref`.
95 |
96 | @param ref The reference to manage
97 | */
98 | constexpr explicit owned_ref(T* ref) : m_ref(ref) {}
99 |
100 | constexpr owned_ref(const owned_ref& cpfrom) : m_ref(cpfrom.m_ref) {
101 | Py_XINCREF(m_ref);
102 | }
103 |
104 | constexpr owned_ref(owned_ref&& mvfrom) noexcept : m_ref(mvfrom.m_ref) {
105 | mvfrom.m_ref = nullptr;
106 | }
107 |
108 | constexpr owned_ref& operator=(const owned_ref& cpfrom) {
109 | // we need to incref before we decref to support self assignment
110 | Py_XINCREF(cpfrom.m_ref);
111 | Py_XDECREF(m_ref);
112 | m_ref = cpfrom.m_ref;
113 | return *this;
114 | }
115 |
116 | constexpr owned_ref& operator=(owned_ref&& mvfrom) noexcept {
// swap instead of decref-now: the old reference dies when ``mvfrom`` does
117 | std::swap(m_ref, mvfrom.m_ref);
118 | return *this;
119 | }
120 |
121 | /** Create a scoped ref that is a new reference to `ref`.
122 |
123 | @param ref The Python object to create a new managed reference to.
124 | */
125 | constexpr static owned_ref new_reference(borrowed_ref ref) {
126 | Py_INCREF(ref.get());
127 | return owned_ref{ref.get()};
128 | }
129 |
130 | /** Create a scoped ref that is a new reference to `ref` if `ref` is non-null.
131 |
132 | @param ref The Python object to create a new managed reference to. If `ref`
133 | is `nullptr`, then the resulting object just holds `nullptr` also.
134 | */
135 | constexpr static owned_ref xnew_reference(borrowed_ref ref) {
136 | Py_XINCREF(ref.get());
137 | return owned_ref{ref.get()};
138 | }
139 |
140 | /** Decref the managed pointer if it is not `nullptr`.
141 | */
142 | ~owned_ref() {
143 | Py_XDECREF(m_ref);
144 | }
145 |
146 | /** Return the underlying pointer and invalidate the `owned_ref`.
147 |
148 | This allows the reference to "escape" the current scope.
149 |
150 | @return The underlying pointer.
151 | @see get
152 | */
// Rvalue-qualified: may only be called on an expiring owned_ref.
153 | T* escape() && {
154 | T* ret = m_ref;
155 | m_ref = nullptr;
156 | return ret;
157 | }
158 |
159 | /** Get the underlying managed pointer.
160 |
161 | @return The pointer managed by this `owned_ref`.
162 | @see escape
163 | */
164 | constexpr T* get() const {
165 | return m_ref;
166 | }
167 |
168 | explicit operator T*() const {
169 | return m_ref;
170 | }
171 |
172 | // use an enable_if to resolve the ambiguous dispatch when T is PyObject
173 | template::value>>
175 | explicit operator PyObject*() const {
176 | return reinterpret_cast(m_ref);
177 | }
178 |
179 | T& operator*() const {
180 | return *m_ref;
181 | }
182 |
183 | T* operator->() const {
184 | return m_ref;
185 | }
186 |
187 | explicit operator bool() const {
188 | return m_ref;
189 | }
190 |
// Pointer-identity comparison only; this does not invoke Python ``__eq__``.
191 | bool operator==(borrowed_ref<> other) const {
192 | return m_ref == other.get();
193 | }
194 |
195 | bool operator!=(borrowed_ref<> other) const {
196 | return m_ref != other.get();
197 | }
198 | };
199 |
// Hashable map key wrapping a Python object; equality uses Python ``==``
// (Py_EQ) semantics rather than pointer identity, so it can back a
// flat_hash_map that behaves like a Python dict.
200 | class object_map_key {
201 | private:
202 | borrowed_ref<> m_ob;
203 |
204 | public:
// NOTE(review): ``new_reference`` creates a temporary owned_ref whose incref
// is undone when the temporary is destroyed at the end of this constructor,
// so m_ob ends up borrowing. Callers (see setattr) manage key refcounts
// manually -- confirm this round-trip is intentional and not a dropped ref.
205 | object_map_key(borrowed_ref<> ob) : m_ob(owned_ref<>::new_reference(ob)) {}
206 | object_map_key(const owned_ref<>& ob) : m_ob(ob) {}
207 |
208 | object_map_key() = default;
209 | object_map_key(const object_map_key&) = default;
210 | object_map_key(object_map_key&&) = default;
211 |
212 | object_map_key& operator=(const object_map_key&) = default;
213 | object_map_key& operator=(object_map_key&&) = default;
214 |
215 | PyObject* get() const {
216 | return m_ob.get();
217 | }
218 |
219 | explicit operator bool() const noexcept {
220 | return static_cast(m_ob);
221 | }
222 |
223 | operator const borrowed_ref<>&() const noexcept {
224 | return m_ob;
225 | }
226 |
// Pointer-identity fast path first; falls back to Python rich comparison.
// Throws std::runtime_error when PyObject_RichCompareBool fails; callers
// translate that into a Python RuntimeError.
227 | bool operator==(const object_map_key& other) const {
228 | if (m_ob == other.m_ob) {
229 | return true;
230 | }
231 | if (!m_ob) {
232 | return !static_cast(other.m_ob);
233 | }
234 | if (!other.m_ob) {
235 | return false;
236 | }
237 |
238 | int r = PyObject_RichCompareBool(m_ob.get(), other.get(), Py_EQ);
239 | if (r < 0) {
240 | throw std::runtime_error{"failed to compare"};
241 | }
242 |
243 | return r;
244 | }
245 |
246 | bool operator!=(const object_map_key& other) const {
247 | if (m_ob != other.m_ob) {
248 | return true;
249 | }
250 |
251 | if (!m_ob) {
252 | return static_cast(other.m_ob);
253 | }
254 | if (!other.m_ob) {
255 | return true;
256 | }
257 |
258 | int r = PyObject_RichCompareBool(m_ob.get(), other.get(), Py_NE);
259 | if (r < 0) {
260 | throw std::runtime_error{"failed to compare"};
261 | }
262 |
263 | return r;
264 | }
265 | };
266 | } // namespace qb
267 |
// std::hash specialization for qb::object_map_key (the bracketed template
// argument was stripped in this dump) delegating to PyObject_Hash so keys
// hash consistently with their Python __hash__.
268 | namespace std {
269 | template<>
270 | struct hash {
271 | auto operator()(const qb::object_map_key& ob) const {
272 | // this returns a different type in Python 2 and Python 3
273 | using out_type = decltype(PyObject_Hash(ob.get()));
274 |
// An empty (null) key hashes to 0 rather than calling into Python.
275 | if (!ob.get()) {
276 | return out_type{0};
277 | }
278 |
// PyObject_Hash signals failure with -1; surface it as a C++ exception.
279 | out_type r = PyObject_Hash(ob.get());
280 | if (r == -1) {
281 | throw std::runtime_error{"python hash failed"};
282 | }
283 |
284 | return r;
285 | }
286 | };
287 | }
288 |
289 | namespace qb {
// A single fixed-capacity bump-allocated buffer: allocation advances a
// cursor (with alignment padding); individual frees are never performed --
// the whole buffer is released when the slab is destroyed.
290 | class slab {
291 | private:
292 | struct free_deleter {
293 | void operator()(std::byte* p) {
294 | std::free(p);
295 | }
296 | };
297 |
298 | std::unique_ptr m_data;
299 | std::size_t m_size;
300 | std::size_t m_cap;
301 |
302 | public:
303 | slab(slab&&) = default;
304 |
305 | private:
306 | static std::unique_ptr allocate_slab(std::size_t cap) {
307 | void* p = malloc(cap);
308 | if (!p) {
309 | throw std::bad_alloc{};
310 | }
311 |
312 | return std::unique_ptr{reinterpret_cast(p)};
313 | }
314 |
315 | public:
316 | explicit slab(std::size_t cap) : m_data(allocate_slab(cap)), m_size(0), m_cap(cap) {}
317 |
318 | std::size_t capacity() const {
319 | return m_cap;
320 | }
321 |
// std::greater_equal/std::less_equal give defined behavior for comparing
// unrelated pointers (unlike raw relational operators).
// NOTE(review): less_equal against the one-past-the-end pointer means a
// pointer exactly at the end is reported as contained -- confirm this
// off-by-one is intended.
322 | bool contains(std::byte* p) const {
323 | return std::greater_equal{}(p, m_data.get()) &&
324 | std::less_equal{}(p, m_data.get() + capacity());
325 | }
326 |
// Bump allocation: pad the cursor up to ``align``, then advance by ``size``.
// Returns nullptr when the request does not fit in the remaining space.
327 | std::byte* try_allocate(std::size_t size, std::size_t align) {
328 | std::size_t align_padding = (align - (m_size % align)) % align;
329 | if (m_size + align_padding + size > capacity()) {
330 | return nullptr;
331 | }
332 | m_size += align_padding;
333 | std::byte* out = m_data.get() + m_size;
334 | m_size += size;
335 | return out;
336 | }
337 | };
338 |
339 | class arena;
340 | class arena_allocatable_object;
341 |
// A growable sequence of slabs plus a list of strong Python references that
// must stay alive as long as the arena does. The arena itself is always
// held behind a std::shared_ptr (enable_shared_from_this).
342 | class arena : public std::enable_shared_from_this {
343 | public:
// Minimal std-style allocator that draws from the arena when one is
// attached and from the global heap otherwise.
344 | template
345 | class allocator {
346 | private:
347 | arena* m_arena;
348 |
349 | public:
350 | using value_type = T;
351 |
352 | explicit allocator(arena* arena) : m_arena(arena) {}
353 |
354 | template
355 | allocator(const allocator& cpfrom) : m_arena(cpfrom.get_arena()) {}
356 |
357 | T* allocate(std::size_t count) {
358 | if (!m_arena) {
359 | return new T[count];
360 | }
361 | return reinterpret_cast(m_arena->allocate(count * sizeof(T), alignof(T)));
362 | }
363 |
// Arena-backed memory is never freed piecemeal; it dies with the arena.
364 | void deallocate(T* ptr, std::size_t) {
365 | if (!m_arena) {
366 | delete[] ptr;
367 | }
368 | }
369 |
370 | arena* get_arena() const {
371 | return m_arena;
372 | }
373 | };
374 |
375 | private:
376 | std::vector m_slabs;
// Strong refs to non-arena objects reachable from arena objects; the deque
// itself is allocated from this arena (raw ``this`` avoids a shared_ptr
// cycle).
377 | std::deque, allocator>> m_external_references;
378 |
379 | static std::vector initialize_slabs(std::size_t slab_size) {
380 | std::vector out;
381 | out.emplace_back(slab_size);
382 | return out;
383 | }
384 |
385 | public:
386 | arena(arena&&) = delete;
387 |
388 | explicit arena(std::size_t slab_size)
389 | : m_slabs(initialize_slabs(slab_size)),
390 | m_external_references(allocator>{this}) {}
391 |
// Linear scan over all slabs; used to decide whether a pointer was
// allocated from this arena.
392 | bool contains(std::byte* p) const {
393 | for (const slab& s : m_slabs) {
394 | if (s.contains(p)) {
395 | return true;
396 | }
397 | }
398 | return false;
399 | }
400 |
// Allocate from the newest slab, appending a fresh slab of the same
// capacity when the request does not fit. Requests larger than one slab
// are rejected with a C++ exception (translated to Python by callers).
401 | std::byte* allocate(std::size_t size, std::size_t align) {
402 | std::size_t capacity = m_slabs.back().capacity();
403 | if (size > capacity) {
404 | std::stringstream ss;
405 | ss << "cannot allocate objects larger than the slab size: " << size << " > "
406 | << capacity;
407 | throw std::runtime_error{ss.str()};
408 | }
409 |
410 | std::byte* out = m_slabs.back().try_allocate(size, align);
411 | if (!out) {
412 | m_slabs.emplace_back(capacity);
413 | out = m_slabs.back().try_allocate(size, align);
414 | assert(out);
415 | }
416 | return out;
417 | }
418 |
// Pin ``ob`` for the arena's whole lifetime (entries are never removed).
419 | void add_external_reference(borrowed_ref<> ob) {
420 | m_external_references.emplace_back(owned_ref<>::new_reference(ob));
421 | }
422 | };
423 |
// Instance layout for the ArenaAllocatable metaclass: a heap type extended
// with a per-type stack of active arenas. Instances of the type allocate
// from the stack's back (the most recently entered arena).
424 | struct arena_allocatable_meta_object : public PyHeapTypeObject {
425 | std::vector> arena_stack;
426 | };
427 |
428 | namespace arena_allocatable_methods {
429 | void dealloc(PyObject*);
430 | }
431 |
432 | namespace arena_allocatable_meta_methods{
433 | PyObject* new_(PyTypeObject* cls, PyObject* args, PyObject* kwargs) {
434 | owned_ref out{PyType_Type.tp_new(cls, args, kwargs)};
435 | if (!out) {
436 | return nullptr;
437 | }
438 | int res = PyObject_HasAttrString(out.get(), "__slots__");
439 | if (res < 0) {
440 | return nullptr;
441 | }
442 | if (res) {
443 | PyErr_SetString(PyExc_TypeError,
444 | "cannot add __slots__ to an ArenaAllocatable type");
445 | return nullptr;
446 | }
447 |
448 | auto* as_type = reinterpret_cast(out.get());
449 | as_type->tp_flags &= ~Py_TPFLAGS_HAVE_GC;
450 | as_type->tp_dealloc = arena_allocatable_methods::dealloc;
451 |
452 | try {
453 | new (&reinterpret_cast(out.get())->arena_stack)
454 | std::vector>{};
455 | }
456 | catch (const std::exception& e) {
457 | PyErr_Format(PyExc_RuntimeError, "a C++ error was raised: %s", e.what());
458 | }
459 | return std::move(out).escape();
460 | }
461 |
// ``tp_dealloc`` for the metaclass: explicitly run the destructor of the
// placement-new'd arena_stack before delegating to ``type``'s dealloc,
// which releases the heap type itself.
462 | void dealloc(PyObject* untyped_self) {
463 | auto* typed_self = reinterpret_cast(untyped_self);
464 | typed_self->arena_stack.~vector();
465 | PyType_Type.tp_dealloc(untyped_self);
466 | }
467 | } // namespace arena_allocatable_meta_methods
468 |
// The metaclass itself: a subclass of ``type`` whose instances (user-defined
// ArenaAllocatable classes) each carry an arena_stack (see
// arena_allocatable_meta_object). Only tp_new/tp_dealloc are overridden.
469 | PyTypeObject arena_allocatable_meta_type = {
470 | // clang-format disable
471 | PyVarObject_HEAD_INIT(&PyType_Type, 0)
472 | // clang-format enable
473 | "quelling_blade.arena_allocatable._ArenaAllocatableMeta", // tp_name
474 | sizeof(arena_allocatable_meta_object), // tp_basicsize
475 | 0, // tp_itemsize
476 | arena_allocatable_meta_methods::dealloc, // tp_dealloc
477 | 0, // tp_print
478 | 0, // tp_getattr
479 | 0, // tp_setattr
480 | 0, // tp_reserved
481 | 0, // tp_repr
482 | 0, // tp_as_number
483 | 0, // tp_as_sequence
484 | 0, // tp_as_mapping
485 | 0, // tp_hash
486 | 0, // tp_call
487 | 0, // tp_str
488 | 0, // tp_getattro
489 | 0, // tp_setattro
490 | 0, // tp_as_buffer
491 | Py_TPFLAGS_DEFAULT, // tp_flags
492 | 0, // tp_doc
493 | 0, // tp_traverse
494 | 0, // tp_clear
495 | 0, // tp_richcompare
496 | 0, // tp_weaklistoffset
497 | 0, // tp_iter
498 | 0, // tp_iternext
499 | 0, // tp_methods
500 | 0, // tp_members
501 | 0, // tp_getset
502 | &PyType_Type, // tp_base
503 | 0, // tp_dict
504 | 0, // tp_descr_get
505 | 0, // tp_descr_set
506 | 0, // tp_dictoffset
507 | 0, // tp_init
508 | 0, // tp_alloc
509 | arena_allocatable_meta_methods::new_, // tp_new
510 | };
511 |
// Instance layout of the Python-visible ``qb.Arena`` context manager.
512 | struct arena_context_object {
513 | PyObject head;
// set once this context's arenas have been popped (or it never pushed any)
514 | bool popped;
// strong refs to the ArenaAllocatable classes whose stacks we pushed onto
515 | std::vector> cls;
// slab size (bytes) used for the arena created by this context
516 | std::size_t size;
517 | };
518 |
519 | namespace arena_context_methods {
520 | PyObject* new_(PyTypeObject*, PyObject*, PyObject*);
521 |
522 |
// ``__enter__``: the context object is its own target; just return self.
523 | PyObject* enter(PyObject* untyped_self, PyObject*) {
524 | Py_INCREF(untyped_self);
525 | return untyped_self;
526 | }
527 |
// Pop this context's arena from every registered type's stack.
// NOTE(review): the top of every registered type's stack is assumed to be
// this context's arena; closing contexts out of stack order breaks that
// assumption (this is the known bug described in README note [2]).
528 | int close_impl(borrowed_ref self) {
529 | if (self->popped || !self->cls.size()) {
530 | return 0;
531 | }
// Owners of the shared_ptr are: one per registered class (the stack
// entries) plus one per escaped instance -- the difference is "alive".
532 | long use_count = self->cls.front()->arena_stack.back().use_count();
533 | long alive = use_count - self->cls.size();
// NOTE(review): when objects are still alive we only warn -- the stacks are
// NOT popped and ``popped`` stays false, so a later close/dealloc retries.
534 | if (alive) {
535 | return PyErr_WarnFormat(PyExc_RuntimeWarning,
536 | 1,
537 | "%ld object%s still alive at arena exit",
538 | alive,
539 | (alive != 1) ? "s are" : " is");
540 | }
541 | for (borrowed_ref cls : self->cls) {
542 | cls->arena_stack.pop_back();
543 | }
544 | self->popped = true;
545 | return 0;
546 | }
547 |
// ``close()``: explicit close; raises if the context was already closed.
548 | PyObject* close(PyObject* untyped_self, PyObject*) {
549 | borrowed_ref self{reinterpret_cast(untyped_self)};
550 | if (self->popped) {
551 | PyErr_SetString(PyExc_RuntimeError, "arena context was already closed");
552 | return nullptr;
553 | }
554 | if (close_impl(self)) {
555 | return nullptr;
556 | }
557 | Py_RETURN_NONE;
558 | }
559 |
// ``__exit__``: registered METH_VARARGS, so the (type, value, traceback)
// triple arrives as an ignored args tuple; always closes.
560 | PyObject* exit(PyObject* untyped_self, PyObject*) {
561 | return close(untyped_self, nullptr);
562 | }
563 |
// tp_dealloc: best-effort close; a failure here cannot propagate, so it is
// reported via PyErr_WriteUnraisable.
564 | void dealloc(PyObject* untyped_self) {
565 | borrowed_ref self{reinterpret_cast(untyped_self)};
566 | if (close_impl(self)) {
567 | PyErr_WriteUnraisable(untyped_self);
568 | }
569 | PyObject_Del(untyped_self);
570 | }
571 |
572 | PyMethodDef methods[] = {
573 | {"close", close, METH_NOARGS, nullptr},
574 | {"__enter__", enter, METH_NOARGS, nullptr},
575 | {"__exit__", exit, METH_VARARGS, nullptr},
576 | {nullptr},
577 | };
578 | } // namespace arena_context_methods
579 |
// The Python-visible ``qb.Arena`` context-manager type. All behavior lives
// in arena_context_methods (close/__enter__/__exit__ plus tp_new/tp_dealloc).
580 | PyTypeObject arena_context_type = {
581 | // clang-format disable
582 | PyVarObject_HEAD_INIT(&PyType_Type, 0)
583 | // clang-format enable
584 | "quelling_blade.arena_allocatable.Arena", // tp_name
585 | sizeof(arena_context_object), // tp_basicsize
586 | 0, // tp_itemsize
587 | arena_context_methods::dealloc, // tp_dealloc
588 | 0, // tp_print
589 | 0, // tp_getattr
590 | 0, // tp_setattr
591 | 0, // tp_reserved
592 | 0, // tp_repr
593 | 0, // tp_as_number
594 | 0, // tp_as_sequence
595 | 0, // tp_as_mapping
596 | 0, // tp_hash
597 | 0, // tp_call
598 | 0, // tp_str
599 | 0, // tp_getattro
600 | 0, // tp_setattro
601 | 0, // tp_as_buffer
602 | Py_TPFLAGS_DEFAULT, // tp_flags
603 | 0, // tp_doc
604 | 0, // tp_traverse
605 | 0, // tp_clear
606 | 0, // tp_richcompare
607 | 0, // tp_weaklistoffset
608 | 0, // tp_iter
609 | 0, // tp_iternext
610 | arena_context_methods::methods, // tp_methods
611 | 0, // tp_members
612 | 0, // tp_getset
613 | 0, // tp_base
614 | 0, // tp_dict
615 | 0, // tp_descr_get
616 | 0, // tp_descr_set
617 | 0, // tp_dictoffset
618 | 0, // tp_init
619 | 0, // tp_alloc
620 | arena_context_methods::new_, // tp_new
621 | };
622 |
623 |
624 | namespace arena_context_methods {
// tp_new for ``qb.Arena(types, slab_size=1 << 16)``: accepts a single
// ArenaAllocatable subclass or an iterable of them, creates one shared C++
// arena, and pushes it onto every registered type's arena_stack.
// NOTE(review): the README says subclasses of the given types participate
// too; that expansion is not visible in this function -- confirm where (or
// whether) it happens.
625 | PyObject* new_(PyTypeObject*, PyObject* args, PyObject* kwargs) {
626 | static const char* const keywords[] = {"types", "slab_size", nullptr};
627 | PyObject* borrowed_types;
628 | Py_ssize_t slab_size = 1 << 16;
629 | if (!PyArg_ParseTupleAndKeywords(args,
630 | kwargs,
631 | "O|n:Arena",
632 | const_cast(keywords),
633 | &borrowed_types,
634 | &slab_size)) {
635 | return nullptr;
636 | }
637 |
638 | owned_ref<> types = owned_ref<>::new_reference(borrowed_types);
639 |
// A bare class is normalized into a 1-tuple so the code below can iterate.
640 | int res = PyObject_IsInstance(types.get(), reinterpret_cast(&PyType_Type));
641 | if (res < 0) {
642 | return nullptr;
643 | }
644 | if (res) {
645 | if (!(types = owned_ref{PyTuple_Pack(1, types.get())})) {
646 | return nullptr;
647 | }
648 | }
649 |
650 | owned_ref types_iter{PyObject_GetIter(types.get())};
651 | if (!types_iter) {
652 | return nullptr;
653 | }
654 |
655 | owned_ref out{PyObject_New(arena_context_object, &arena_context_type)};
656 | if (!out) {
657 | return nullptr;
658 | }
659 |
660 | std::shared_ptr arena;
661 | try {
// PyObject_New does not run C++ constructors, so the C++ members must be
// placement-constructed before any code path can read them.
662 | new (&out.get()->popped) bool{false};
663 | new (&out.get()->cls) std::vector>{};
664 | new (&out.get()->size) std::size_t{static_cast(slab_size)};
665 |
666 |
667 | arena = std::make_shared(slab_size);
668 | }
669 | catch (const std::exception& e) {
670 | PyErr_SetString(PyExc_RuntimeError, e.what());
671 | return nullptr;
672 | }
673 |
// NOTE(review): when a later iteration fails, arenas already pushed onto
// earlier types' stacks are not rolled back here -- presumably the context
// object's dealloc-driven close handles them; confirm.
674 | while (owned_ref type{PyIter_Next(types_iter.get())}) {
675 | int res = PyObject_IsInstance(type.get(),
676 | reinterpret_cast(
677 | &arena_allocatable_meta_type));
678 | if (res < 0) {
679 | return nullptr;
680 | }
681 | if (!res) {
682 | PyErr_Format(PyExc_TypeError,
683 | "%R is not a subclass of ArenaAllocatable",
684 | type.get());
685 | return nullptr;
686 | }
687 |
688 | borrowed_ref typed_type{
689 | reinterpret_cast(type.get())};
690 | try {
691 | typed_type->arena_stack.emplace_back(arena);
692 | out->cls.emplace_back(
693 | owned_ref::new_reference(typed_type));
694 | }
695 | catch (const std::exception& e) {
696 | PyErr_SetString(PyExc_RuntimeError, e.what());
697 | return nullptr;
698 | }
699 | }
// PyIter_Next returns null both for exhaustion and error; distinguish here.
700 | if (PyErr_Occurred()) {
701 | return nullptr;
702 | }
703 |
704 | return reinterpret_cast(std::move(out).escape());
705 | }
706 | } // namespace arena_context_methods
707 |
// Instance layout for ArenaAllocatable objects: a bare PyObject header plus
// an owning arena pointer and an embedded attribute map (no separate
// __dict__ allocation). Per the README, ob_refcnt counts only references
// from *outside* the owning arena.
708 | struct arena_allocatable_object : public PyObject {
709 | using members_type =
710 | absl::flat_hash_map,
713 | absl::container_internal::hash_default_eq,
714 | arena::allocator>>;
715 |
// non-null only while the instance has escaped its arena (or was allocated
// globally with a refcount); holds the arena alive in that case
716 | std::shared_ptr owning_arena;
717 | members_type members;
718 |
// Starts with ob_refcnt == 1: new instances begin in the "escaped" state.
719 | arena_allocatable_object(const std::shared_ptr& arena,
720 | borrowed_ref type)
721 | : PyObject({_PyObject_EXTRA_INIT 1, type.get()}),
722 | owning_arena(arena),
723 | // use a "weak" pointer here to break a ref cycle
724 | members(members_type::allocator_type{arena.get()}) {
725 | }
726 | };
727 |
728 | namespace arena_allocatable_methods {
// tp_new for ArenaAllocatable instances: allocate from the top of the
// type's arena stack when one is active, otherwise from the Python heap.
729 | PyObject* new_(PyTypeObject* cls, PyObject*, PyObject*) {
730 | try {
731 | auto& arena_stack =
732 | reinterpret_cast(cls)->arena_stack;
733 | if (!arena_stack.size()) {
// Global (non-arena) allocation path; the instance owns a ref to its type.
// NOTE(review): PyMem_New sizes by sizeof(arena_allocatable_object), not
// cls->tp_basicsize as the arena path does -- confirm subclasses can never
// enlarge the basicsize (__slots__ is rejected by the metaclass).
734 | Py_INCREF(cls);
735 | auto* allocation = PyMem_New(arena_allocatable_object, 1);
736 | new(allocation) arena_allocatable_object(std::shared_ptr{}, cls);
737 | return allocation;
738 | }
739 |
// Arena path: bump-allocate raw bytes, then placement-construct. Note no
// Py_INCREF(cls) here, unlike the global path -- presumably the type is
// kept alive by the context object; confirm.
740 | std::byte* allocation =
741 | arena_stack.back()->allocate(cls->tp_basicsize,
742 | alignof(arena_allocatable_object));
743 | new (allocation) arena_allocatable_object(arena_stack.back(), cls);
744 | return reinterpret_cast(allocation);
745 | }
746 | catch (const std::exception& e) {
747 | PyErr_SetString(PyExc_RuntimeError, e.what());
748 | return nullptr;
749 | }
750 | }
751 |
// tp_setattro: store ``value`` under ``key_ptr`` in the embedded member map
// (or delete the attribute when ``value`` is null). Data descriptors on the
// type take precedence, matching normal Python attribute semantics.
752 | int setattr(PyObject* untyped_self, PyObject* key_ptr, PyObject* value) {
753 | // search for a descriptor on the type before looking on the instance
754 | borrowed_ref<> descr = _PyType_Lookup(Py_TYPE(untyped_self), key_ptr);
755 | descrsetfunc descrset = descr ? Py_TYPE(descr)->tp_descr_set : nullptr;
756 | if (descrset) {
757 | Py_INCREF(descr);
758 | int res = descrset(descr.get(), untyped_self, value);
759 | Py_DECREF(descr);
760 | return res;
761 | }
762 |
763 | try {
764 | borrowed_ref self{static_cast(untyped_self)};
765 |
766 | borrowed_ref key{key_ptr};
767 | auto arena = self->owning_arena;
768 | if (arena) {
// Arena path: the arena pins the key (and any value living outside the
// arena) instead of per-entry refcounting; arena-resident values are
// deliberately NOT increfed (ob_refcnt counts external refs only).
// NOTE(review): add_external_reference appends unconditionally, so
// repeatedly assigning the same attribute grows the arena's external
// reference deque monotonically -- confirm this is an accepted trade-off.
769 | if (value) {
770 | arena->add_external_reference(key);
771 | if (!arena->contains(reinterpret_cast(value))) {
772 | arena->add_external_reference(value);
773 | }
774 | self->members.insert_or_assign(key, value);
775 | }
776 | else {
777 | if (self->members.erase(key) == 0) {
778 | PyErr_SetObject(PyExc_AttributeError, key.get());
779 | return -1;
780 | }
781 | }
782 | }
783 | else {
// Global path: manual refcount management of both key and value.
784 | if (value) {
785 | Py_INCREF(value); // inc before when overwriting an attr with itself
786 | auto [it, inserted] = self->members.try_emplace(key, value);
787 | if (inserted) {
788 | Py_INCREF(key.get());
789 | }
790 | else {
791 | Py_DECREF(it->second);
792 | it->second = value;
793 | }
794 | }
795 | else {
796 | auto search = self->members.find(key);
797 | if (search == self->members.end()) {
798 | PyErr_SetObject(PyExc_AttributeError, key.get());
799 | return -1;
800 | }
801 | Py_DECREF(search->first.get());
802 | Py_DECREF(search->second);
803 | self->members.erase(search);
804 | }
805 | }
806 | return 0;
807 | }
// Map operations may throw (hash/compare call back into Python).
808 | catch (const std::exception& e) {
809 | PyErr_SetString(PyExc_RuntimeError, e.what());
810 | return -1;
811 | }
812 | }
813 |
814 | PyObject* getattr(PyObject* untyped_self, PyObject* key) {  // tp_getattro: attribute lookup honoring the descriptor protocol
815 | borrowed_ref tp = Py_TYPE(untyped_self);
816 | // search for a descriptor on the type before looking on the instance
817 | borrowed_ref<> descr = _PyType_Lookup(tp.get(), key);
818 | descrgetfunc descrget = descr ? Py_TYPE(descr)->tp_descr_get : nullptr;
819 | if (descrget && PyDescr_IsData(descr)) {
820 | // data descriptors take precedence over instance data, call the descriptor
821 | Py_INCREF(descr);  // keep descr alive across the call
822 | PyObject* res = descrget(descr.get(), untyped_self, static_cast(tp));
823 | Py_DECREF(descr);
824 | return res;
825 | }
826 | 
827 | try {
828 | borrowed_ref self{reinterpret_cast(untyped_self)};
829 | 
830 | auto it = self->members.find(borrowed_ref{key});  // instance attribute lookup
831 | if (it == self->members.end()) {
832 | if (descrget) {
833 | // use the descriptor if available
834 | Py_INCREF(descr);
835 | PyObject* res =
836 | descrget(descr.get(), untyped_self, static_cast(tp));
837 | Py_DECREF(descr);
838 | return res;
839 | }
840 | PyErr_SetObject(PyExc_AttributeError, key);  // not found anywhere -> AttributeError
841 | return nullptr;
842 | }
843 | PyObject* out = it->second;
844 | if (out->ob_refcnt == 0) {  // refcount hit zero but the storage survives inside the arena; revive it
845 | assert(self->owning_arena->contains(reinterpret_cast(out)));
846 | // add a reference to the arena
847 | reinterpret_cast(out)->owning_arena =
848 | self->owning_arena;
849 | }
850 | Py_INCREF(out);  // return a new reference per the tp_getattro contract
851 | return out;
852 | }
853 | catch (const std::exception& e) {
854 | PyErr_SetString(PyExc_RuntimeError, e.what());
855 | return nullptr;
856 | }
857 | }
858 |
859 | void dealloc(PyObject* untyped_self) {  // tp_dealloc: arena-owned instances only release their arena handle
860 | borrowed_ref self{reinterpret_cast(untyped_self)};
861 | 
862 | if (self->owning_arena) {
863 | // we are in an arena, just drop the ref
864 | self->owning_arena.reset();
865 | }
866 | else {
867 | // we have no arena, we need to actually clear out the instance and die
868 | for (auto [k, v] : self->members) {
869 | Py_DECREF(k.get());  // undo the per-entry key/value refs taken in setattr
870 | Py_DECREF(v);
871 | }
872 | Py_DECREF(Py_TYPE(untyped_self));  // heap-type instances own a reference to their type
873 | self->~arena_allocatable_object();  // explicit destructor call; storage is released separately below
874 | PyMem_Free(self.get());
875 | }
876 | }
877 | } // namespace arena_allocatable_methods
878 |
879 | arena_allocatable_meta_object arena_allocatable_type = {PyHeapTypeObject{{  // static type object for ArenaAllocatable; unlabeled slots annotated below
880 | // clang-format off
881 | PyVarObject_HEAD_INIT(&arena_allocatable_meta_type, 0)
882 | // clang-format on
883 | "quelling_blade.arena_allocatable.ArenaAllocatable",  // tp_name
884 | sizeof(arena_allocatable_object),  // tp_basicsize
885 | 0, // tp_itemsize
886 | arena_allocatable_methods::dealloc, // tp_dealloc
887 | 0, // tp_print
888 | 0, // tp_getattr
889 | 0, // tp_setattr
890 | 0, // tp_reserved
891 | 0, // tp_repr
892 | 0, // tp_as_number
893 | 0, // tp_as_sequence
894 | 0, // tp_as_mapping
895 | 0, // tp_hash
896 | 0, // tp_call
897 | 0, // tp_str
898 | arena_allocatable_methods::getattr, // tp_getattro
899 | arena_allocatable_methods::setattr, // tp_setattro
900 | 0, // tp_as_buffer
901 | Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, // tp_flags
902 | 0, // tp_doc
903 | 0, // tp_traverse
904 | 0, // tp_clear
905 | 0, // tp_richcompare
906 | 0, // tp_weaklistoffset
907 | 0, // tp_iter
908 | 0, // tp_iternext
909 | 0, // tp_methods
910 | 0, // tp_members
911 | 0, // tp_getset
912 | 0, // tp_base
913 | 0, // tp_dict
914 | 0, // tp_descr_get
915 | 0, // tp_descr_set
916 | 0, // tp_dictoffset
917 | 0, // tp_init
918 | 0, // tp_alloc
919 | arena_allocatable_methods::new_, // tp_new
920 | }}};
921 |
922 | PyModuleDef module = {PyModuleDef_HEAD_INIT,  // module definition; remaining fields labeled below
923 | "quelling_blade.arena_allocatable",  // m_name
924 | nullptr,  // m_doc
925 | -1,  // m_size: -1 -> state kept in globals, no subinterpreter support
926 | nullptr,  // m_methods
927 | nullptr,  // m_slots
928 | nullptr,  // m_traverse
929 | nullptr};  // m_clear (m_free zero-initialized)
930 |
931 | PyMODINIT_FUNC PyInit_arena_allocatable() {  // module entry point, called by the interpreter on first import
932 | for (PyTypeObject* tp : {&arena_allocatable_meta_type,  // finalize all static types before exposing them
933 | &arena_allocatable_type.ht_type,
934 | &arena_context_type}) {
935 | if (PyType_Ready(tp) < 0) {
936 | return nullptr;
937 | }
938 | }
939 | 
940 | owned_ref mod{PyModule_Create(&module)};  // owned_ref releases the module on the early error returns below
941 | if (!mod) {
942 | return nullptr;
943 | }
944 | 
945 | if (PyObject_SetAttrString(mod.get(),  // expose the public types on the module
946 | "ArenaAllocatable",
947 | reinterpret_cast(&arena_allocatable_type))) {
948 | return nullptr;
949 | }
950 | if (PyObject_SetAttrString(mod.get(),
951 | "Arena",
952 | reinterpret_cast(&arena_context_type))) {
953 | return nullptr;
954 | }
955 | 
956 | return std::move(mod).escape();  // hand the module reference over to the interpreter
957 | }
958 | } // namespace qb
959 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import os
3 | from os.path import abspath, dirname, exists
4 | import subprocess
5 | import sys
6 |
7 | from setuptools import (
8 | Extension,
9 | find_packages,
10 | setup,
11 | )
12 | from setuptools.command.build_ext import build_ext
13 |
14 |
15 | class CMakeExtension(Extension):  # setuptools Extension placeholder; actual compilation is delegated to CMake
16 | def __init__(self, name, cmake_lists_dir='.', sources=None, **kwargs):
17 | Extension.__init__(self, name, sources=sources or [], **kwargs)  # no sources by default: CMake compiles them
18 | self.cmake_lists_dir = abspath(cmake_lists_dir)  # directory containing the top-level CMakeLists.txt
19 |
20 |
21 | class CMakeBuild(build_ext):  # build_ext subclass that shells out to CMake + Ninja per extension
22 | 
23 | def build_extensions(self):
24 | try:
25 | subprocess.check_output(['cmake', '--version'])  # fail fast if cmake is not on PATH
26 | except OSError:
27 | raise RuntimeError('Cannot find CMake executable')
28 | 
29 | for ext in self.extensions:
30 | extdir = abspath(dirname(self.get_ext_fullpath(ext.name)))  # where setuptools expects the built module
31 | cfg = os.environ.get('QB_BUILD_TYPE', 'Release')  # build type overridable via env var
32 | 
33 | cmake_args = [
34 | '-DCMAKE_BUILD_TYPE=' + cfg,
35 | '-DCMAKE_LIBRARY_OUTPUT_DIRECTORY_{}={}'.format(  # drop the shared lib where setuptools will find it
36 | cfg.upper(), extdir,
37 | ),
38 | '-DCMAKE_ARCHIVE_OUTPUT_DIRECTORY_{}={}'.format(  # keep static archives out of the package tree
39 | cfg.upper(), self.build_temp,
40 | ),
41 | '-DPYTHON_EXECUTABLE={}'.format(sys.executable),  # build against the interpreter running setup.py
42 | '-DBUILD_TESTING=OFF',  # skip the project's test targets
43 | ]
44 | 
45 | if not exists(self.build_temp):
46 | os.makedirs(self.build_temp)  # out-of-source build directory
47 | 
48 | # Config and build the extension
49 | subprocess.check_call(
50 | ['cmake', ext.cmake_lists_dir, '-G', 'Ninja'] + cmake_args,
51 | cwd=self.build_temp,
52 | )
53 | subprocess.check_call(
54 | ['cmake', '--build', '.', '--config', cfg],
55 | cwd=self.build_temp,
56 | )
57 |
58 |
59 | ext_modules = [  # single CMake-driven extension module
60 | CMakeExtension('quelling_blade.arena_allocatable'),
61 | ]
62 |
63 | setup(
64 | name='quelling-blade',
65 | version='0.0.1',
66 | description='Library for creating objects which can be arena allocated.',
67 | author='Joe Jevnik',
68 | author_email='joejev@gmail.com',
69 | packages=find_packages(),
70 | cmdclass={'build_ext': CMakeBuild},  # route extension builds through CMake
71 | ext_modules=ext_modules,
72 | )
73 |
--------------------------------------------------------------------------------