├── pydatastructs ├── graphs │ ├── tests │ │ ├── __init__.py │ │ ├── meson.build │ │ ├── test_adjacency_matrix.py │ │ └── test_adjacency_list.py │ ├── _backend │ │ ├── __init__.py │ │ └── cpp │ │ │ ├── __init__.py │ │ │ ├── algorithms.cpp │ │ │ └── graph.cpp │ ├── __init__.py │ ├── meson.build │ ├── _extensions.py │ ├── adjacency_matrix.py │ ├── adjacency_list.py │ └── graph.py ├── trees │ ├── tests │ │ ├── __init__.py │ │ ├── test_m_ary_trees.py │ │ ├── meson.build │ │ ├── test_space_partitioning_tree.py │ │ └── benchmarks │ │ │ └── test_binary_trees.py │ ├── _backend │ │ ├── __init__.py │ │ └── cpp │ │ │ ├── __init__.py │ │ │ ├── trees.cpp │ │ │ ├── Treap.hpp │ │ │ └── BinaryIndexedTree.hpp │ ├── _extensions.py │ ├── meson.build │ ├── __init__.py │ └── m_ary_trees.py ├── utils │ ├── tests │ │ ├── __init__.py │ │ ├── meson.build │ │ └── test_misc_util.py │ ├── _backend │ │ ├── __init__.py │ │ └── cpp │ │ │ ├── __init__.py │ │ │ ├── graph_bindings.hpp │ │ │ ├── nodes.cpp │ │ │ ├── AdjacencyMatrixGraphNode.hpp │ │ │ ├── Node.hpp │ │ │ ├── TreeNode.hpp │ │ │ ├── GraphEdge.hpp │ │ │ └── utils.hpp │ ├── raises_util.py │ ├── __init__.py │ ├── meson.build │ ├── _extensions.py │ └── testing_util.py ├── strings │ ├── tests │ │ ├── __init__.py │ │ ├── meson.build │ │ ├── test_trie.py │ │ └── test_algorithms.py │ ├── meson.build │ ├── __init__.py │ └── trie.py ├── linear_data_structures │ ├── _backend │ │ ├── __init__.py │ │ └── cpp │ │ │ ├── __init__.py │ │ │ ├── algorithms │ │ │ ├── __init__.py │ │ │ ├── algorithms.cpp │ │ │ └── quick_sort.hpp │ │ │ └── arrays │ │ │ ├── DynamicArray.hpp │ │ │ ├── arrays.cpp │ │ │ └── Array.hpp │ ├── tests │ │ ├── __init__.py │ │ ├── benchmarks │ │ │ ├── __init__.py │ │ │ ├── test_algorithms.py │ │ │ └── test_arrays.py │ │ ├── meson.build │ │ └── test_arrays.py │ ├── _extensions.py │ ├── meson.build │ └── __init__.py ├── miscellaneous_data_structures │ ├── tests │ │ ├── __init__.py │ │ ├── meson.build │ │ ├── test_binomial_trees.py │ │ ├── 
test_multiset.py │ │ ├── test_disjoint_set.py │ │ ├── test_range_query_static.py │ │ ├── test_range_query_dynamic.py │ │ ├── test_stack.py │ │ └── test_queue.py │ ├── _backend │ │ ├── __init__.py │ │ └── cpp │ │ │ ├── __init__.py │ │ │ └── stack │ │ │ └── stack.cpp │ ├── _extensions.py │ ├── meson.build │ ├── __init__.py │ ├── multiset.py │ ├── binomial_trees.py │ ├── sparse_table.py │ ├── disjoint_set.py │ └── stack.py ├── __init__.py └── meson.build ├── docs ├── requirements.txt ├── source │ ├── pydatastructs │ │ ├── trees │ │ │ ├── m_ary_trees.rst │ │ │ ├── space_partitioning_trees.rst │ │ │ ├── trees.rst │ │ │ ├── heaps.rst │ │ │ └── binary_trees.rst │ │ ├── strings │ │ │ ├── trie.rst │ │ │ ├── algorithms.rst │ │ │ └── strings.rst │ │ ├── graphs │ │ │ ├── graph.rst │ │ │ ├── graphs.rst │ │ │ └── algorithms.rst │ │ ├── miscellaneous_data_structures │ │ │ ├── stack.rst │ │ │ ├── algorithms.rst │ │ │ ├── sparse_table.rst │ │ │ ├── binomial_trees.rst │ │ │ ├── disjoint_set.rst │ │ │ ├── queue.rst │ │ │ └── miscellaneous_data_structures.rst │ │ ├── linear_data_structures │ │ │ ├── linear_data_structures.rst │ │ │ ├── arrays.rst │ │ │ ├── linked_lists.rst │ │ │ └── algorithms.rst │ │ └── pydatastructs.rst │ ├── authors.rst │ ├── conf.py │ ├── index.rst │ ├── contributing.rst │ └── tutorials.rst ├── Makefile └── make.bat ├── requirements.txt ├── .editorconfig ├── .coveragerc ├── meson.build ├── .readthedocs.yaml ├── codecov.yml ├── ISSUE_TEMPLATE.md ├── environment.yml ├── scripts └── build │ ├── dummy_submodules_data.py │ ├── add_dummy_submodules.py │ ├── delete_dummy_submodules.py │ ├── develop.py │ └── install.py ├── pyproject.toml ├── PULL_REQUEST_TEMPLATE.md ├── AUTHORS ├── .gitignore ├── setup.py ├── LICENSE ├── CODE_OF_CONDUCT.md └── .github └── workflows └── ci.yml /pydatastructs/graphs/tests/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /pydatastructs/trees/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pydatastructs/utils/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pydatastructs/graphs/_backend/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pydatastructs/strings/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pydatastructs/trees/_backend/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pydatastructs/utils/_backend/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pydatastructs/graphs/_backend/cpp/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pydatastructs/trees/_backend/cpp/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pydatastructs/utils/_backend/cpp/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- 
/pydatastructs/linear_data_structures/_backend/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pydatastructs/linear_data_structures/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pydatastructs/linear_data_structures/_backend/cpp/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pydatastructs/miscellaneous_data_structures/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pydatastructs/linear_data_structures/tests/benchmarks/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pydatastructs/miscellaneous_data_structures/_backend/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pydatastructs/miscellaneous_data_structures/_backend/cpp/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pydatastructs/linear_data_structures/_backend/cpp/algorithms/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | sphinx==5.0 2 | 
sphinx-readable-theme==1.3.0 3 | myst_nb==0.17.2 -------------------------------------------------------------------------------- /docs/source/pydatastructs/trees/m_ary_trees.rst: -------------------------------------------------------------------------------- 1 | M-ary Trees 2 | =========== 3 | 4 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | codecov 2 | pytest 3 | pytest-cov 4 | llvmlite 5 | spin 6 | meson 7 | -------------------------------------------------------------------------------- /docs/source/pydatastructs/strings/trie.rst: -------------------------------------------------------------------------------- 1 | Trie 2 | ==== 3 | 4 | .. autoclass:: pydatastructs.Trie -------------------------------------------------------------------------------- /docs/source/pydatastructs/graphs/graph.rst: -------------------------------------------------------------------------------- 1 | Graph 2 | ===== 3 | 4 | .. autoclass:: pydatastructs.Graph -------------------------------------------------------------------------------- /docs/source/pydatastructs/strings/algorithms.rst: -------------------------------------------------------------------------------- 1 | Algorithms 2 | ========== 3 | 4 | .. autofunction:: pydatastructs.find -------------------------------------------------------------------------------- /docs/source/pydatastructs/miscellaneous_data_structures/stack.rst: -------------------------------------------------------------------------------- 1 | Stack 2 | ===== 3 | 4 | .. autoclass:: pydatastructs.Stack -------------------------------------------------------------------------------- /docs/source/pydatastructs/graphs/graphs.rst: -------------------------------------------------------------------------------- 1 | Graphs 2 | ====== 3 | 4 | .. 
toctree:: 5 | :maxdepth: 1 6 | 7 | graph.rst 8 | algorithms.rst -------------------------------------------------------------------------------- /docs/source/pydatastructs/miscellaneous_data_structures/algorithms.rst: -------------------------------------------------------------------------------- 1 | Algorithms 2 | ========== 3 | 4 | .. autoclass:: pydatastructs.RangeQueryStatic -------------------------------------------------------------------------------- /docs/source/pydatastructs/miscellaneous_data_structures/sparse_table.rst: -------------------------------------------------------------------------------- 1 | SparseTable 2 | =========== 3 | 4 | .. autoclass:: pydatastructs.SparseTable -------------------------------------------------------------------------------- /docs/source/pydatastructs/strings/strings.rst: -------------------------------------------------------------------------------- 1 | Strings 2 | ======= 3 | 4 | .. toctree:: 5 | :maxdepth: 1 6 | 7 | trie.rst 8 | algorithms.rst -------------------------------------------------------------------------------- /docs/source/pydatastructs/miscellaneous_data_structures/binomial_trees.rst: -------------------------------------------------------------------------------- 1 | Binomial Tree 2 | ============= 3 | 4 | .. autoclass:: pydatastructs.BinomialTree -------------------------------------------------------------------------------- /docs/source/pydatastructs/miscellaneous_data_structures/disjoint_set.rst: -------------------------------------------------------------------------------- 1 | Disjoint Set 2 | ============ 3 | 4 | .. autoclass:: pydatastructs.DisjointSetForest -------------------------------------------------------------------------------- /docs/source/pydatastructs/miscellaneous_data_structures/queue.rst: -------------------------------------------------------------------------------- 1 | Queues 2 | ====== 3 | 4 | .. autoclass:: pydatastructs.Queue 5 | 6 | .. 
autoclass:: pydatastructs.PriorityQueue -------------------------------------------------------------------------------- /docs/source/pydatastructs/trees/space_partitioning_trees.rst: -------------------------------------------------------------------------------- 1 | Space Partitioning Trees 2 | ======================== 3 | 4 | .. autoclass:: pydatastructs.OneDimensionalSegmentTree -------------------------------------------------------------------------------- /pydatastructs/trees/tests/test_m_ary_trees.py: -------------------------------------------------------------------------------- 1 | from pydatastructs import MAryTree 2 | 3 | def test_MAryTree(): 4 | m = MAryTree(1, 1) 5 | assert str(m) == '[(1, 1)]' 6 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | end_of_line = lf 5 | insert_final_newline = true 6 | 7 | [*.py] 8 | indent_style = space 9 | indent_size = 4 10 | trim_trailing_whitespace = true 11 | -------------------------------------------------------------------------------- /docs/source/pydatastructs/trees/trees.rst: -------------------------------------------------------------------------------- 1 | Trees 2 | ===== 3 | 4 | .. 
toctree:: 5 | :maxdepth: 1 6 | 7 | binary_trees.rst 8 | heaps.rst 9 | m_ary_trees.rst 10 | space_partitioning_trees.rst -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | parallel = True 3 | source = pydatastructs 4 | omit = 5 | */tests/* 6 | */setup.py 7 | 8 | [report] 9 | exclude_lines = 10 | pragma: no cover 11 | 12 | raise NotImplementedError 13 | -------------------------------------------------------------------------------- /meson.build: -------------------------------------------------------------------------------- 1 | project('pydatastructs', 'cpp', 2 | version : '1.0.1-dev', 3 | default_options : ['cpp_std=c++17']) 4 | 5 | 6 | python = import('python').find_installation(pure: false) 7 | 8 | subdir('pydatastructs') 9 | -------------------------------------------------------------------------------- /docs/source/pydatastructs/linear_data_structures/linear_data_structures.rst: -------------------------------------------------------------------------------- 1 | Linear Data Structures 2 | ====================== 3 | 4 | .. toctree:: 5 | :maxdepth: 1 6 | 7 | arrays.rst 8 | linked_lists.rst 9 | algorithms.rst -------------------------------------------------------------------------------- /pydatastructs/__init__.py: -------------------------------------------------------------------------------- 1 | from .utils import * 2 | from .linear_data_structures import * 3 | from .trees import * 4 | from .miscellaneous_data_structures import * 5 | from .graphs import * 6 | from .strings import * 7 | 8 | __version__ = "1.0.1-dev" 9 | -------------------------------------------------------------------------------- /docs/source/pydatastructs/linear_data_structures/arrays.rst: -------------------------------------------------------------------------------- 1 | Arrays 2 | ====== 3 | 4 | .. 
autoclass:: pydatastructs.OneDimensionalArray 5 | 6 | .. autoclass:: pydatastructs.MultiDimensionalArray 7 | 8 | .. autoclass:: pydatastructs.DynamicOneDimensionalArray -------------------------------------------------------------------------------- /docs/source/pydatastructs/trees/heaps.rst: -------------------------------------------------------------------------------- 1 | Heaps 2 | ===== 3 | 4 | .. autoclass:: pydatastructs.BinaryHeap 5 | 6 | .. autoclass:: pydatastructs.TernaryHeap 7 | 8 | .. autoclass:: pydatastructs.DHeap 9 | 10 | .. autoclass:: pydatastructs.BinomialHeap 11 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | os: ubuntu-20.04 5 | tools: 6 | python: "3.8" 7 | 8 | sphinx: 9 | configuration: docs/source/conf.py 10 | 11 | python: 12 | install: 13 | - requirements: docs/requirements.txt 14 | - method: pip 15 | path: . 
16 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | coverage: 2 | status: 3 | project: 4 | default: 5 | target: 0% 6 | threshold: 100% 7 | 8 | patch: false 9 | 10 | precision: 3 11 | 12 | comment: 13 | layout: "diff, files, reach" 14 | behavior: default 15 | require_changes: false 16 | -------------------------------------------------------------------------------- /pydatastructs/strings/meson.build: -------------------------------------------------------------------------------- 1 | python = import('python').find_installation(pure: false) 2 | 3 | python.install_sources( 4 | [ 5 | '__init__.py', 6 | 'algorithms.py', 7 | 'trie.py' 8 | ], 9 | subdir: 'pydatastructs/strings' 10 | ) 11 | 12 | subdir('tests') 13 | -------------------------------------------------------------------------------- /pydatastructs/strings/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = [] 2 | 3 | from . 
import ( 4 | trie, 5 | algorithms 6 | ) 7 | 8 | from .trie import ( 9 | Trie 10 | ) 11 | 12 | __all__.extend(trie.__all__) 13 | 14 | from .algorithms import ( 15 | find 16 | ) 17 | 18 | __all__.extend(algorithms.__all__) 19 | -------------------------------------------------------------------------------- /pydatastructs/strings/tests/meson.build: -------------------------------------------------------------------------------- 1 | python = import('python').find_installation(pure: false) 2 | 3 | python.install_sources( 4 | [ 5 | '__init__.py', 6 | 'test_algorithms.py', 7 | 'test_trie.py' 8 | ], 9 | subdir: 'pydatastructs/strings/tests', 10 | install_tag: 'tests' 11 | ) 12 | -------------------------------------------------------------------------------- /docs/source/pydatastructs/miscellaneous_data_structures/miscellaneous_data_structures.rst: -------------------------------------------------------------------------------- 1 | Miscellaneous Data Structures 2 | ============================= 3 | 4 | .. 
toctree:: 5 | :maxdepth: 1 6 | 7 | stack.rst 8 | queue.rst 9 | binomial_trees.rst 10 | disjoint_set.rst 11 | sparse_table.rst 12 | algorithms.rst -------------------------------------------------------------------------------- /pydatastructs/meson.build: -------------------------------------------------------------------------------- 1 | python = import('python').find_installation(pure: false) 2 | 3 | python.install_sources(['__init__.py'], subdir: 'pydatastructs') 4 | 5 | subdir('utils') 6 | subdir('linear_data_structures') 7 | subdir('miscellaneous_data_structures') 8 | subdir('trees') 9 | subdir('graphs') 10 | subdir('strings') 11 | -------------------------------------------------------------------------------- /pydatastructs/utils/tests/meson.build: -------------------------------------------------------------------------------- 1 | python = import('python').find_installation(pure: false) 2 | 3 | python.install_sources( 4 | [ 5 | '__init__.py', 6 | 'test_misc_util.py', 7 | 'test_code_quality.py' 8 | ], 9 | subdir: 'pydatastructs/utils/tests', 10 | install_tag: 'tests' 11 | ) 12 | -------------------------------------------------------------------------------- /ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | #### Description of the problem 2 | 3 | 4 | #### Example of the problem 5 | 6 | 7 | #### References/Other comments 8 | -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: pyds-env 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python=3.9 6 | - pip 7 | - pytest 8 | - pip: 9 | - codecov 10 | - pytest-cov 11 | - spin 12 | - meson 13 | - sphinx==5.0 14 | - sphinx-readable-theme==1.3.0 15 | - myst_nb==0.17.2 16 | - lcov 17 | - llvmlite 18 | -------------------------------------------------------------------------------- 
/docs/source/pydatastructs/pydatastructs.rst: -------------------------------------------------------------------------------- 1 | Modules 2 | ======= 3 | 4 | .. automodule:: pydatastructs 5 | 6 | .. toctree:: 7 | :maxdepth: 1 8 | 9 | linear_data_structures/linear_data_structures.rst 10 | graphs/graphs.rst 11 | strings/strings.rst 12 | trees/trees.rst 13 | miscellaneous_data_structures/miscellaneous_data_structures.rst -------------------------------------------------------------------------------- /pydatastructs/utils/_backend/cpp/graph_bindings.hpp: -------------------------------------------------------------------------------- 1 | #ifndef GRAPH_BINDINGS_HPP 2 | #define GRAPH_BINDINGS_HPP 3 | 4 | #include 5 | 6 | extern PyTypeObject AdjacencyListGraphType; 7 | extern PyTypeObject AdjacencyListGraphNodeType; 8 | extern PyTypeObject AdjacencyMatrixGraphType; 9 | extern PyTypeObject AdjacencyMatrixGraphNodeType; 10 | 11 | #endif 12 | -------------------------------------------------------------------------------- /pydatastructs/graphs/tests/meson.build: -------------------------------------------------------------------------------- 1 | python = import('python').find_installation(pure: false) 2 | 3 | python.install_sources( 4 | [ 5 | '__init__.py', 6 | 'test_adjacency_list.py', 7 | 'test_adjacency_matrix.py', 8 | 'test_algorithms.py' 9 | ], 10 | subdir: 'pydatastructs/graphs/tests', 11 | install_tag: 'tests' 12 | ) 13 | -------------------------------------------------------------------------------- /scripts/build/dummy_submodules_data.py: -------------------------------------------------------------------------------- 1 | project = 'pydatastructs' 2 | 3 | modules = ['linear_data_structures', 'miscellaneous_data_structures', 'utils', 'trees', 'graphs'] 4 | 5 | backend = '_backend' 6 | 7 | cpp = 'cpp' 8 | 9 | dummy_submodules_list = [('_arrays.py', '_algorithms.py'), ('_stack.py',), ('_nodes.py', '_graph_utils.py',), ('_trees.py',), ('_graph.py',)] 10 | 
-------------------------------------------------------------------------------- /docs/source/pydatastructs/linear_data_structures/linked_lists.rst: -------------------------------------------------------------------------------- 1 | Linked Lists 2 | ============ 3 | 4 | .. autoclass:: pydatastructs.SinglyLinkedList 5 | 6 | .. autoclass:: pydatastructs.DoublyLinkedList 7 | 8 | .. autoclass:: pydatastructs.SinglyCircularLinkedList 9 | 10 | .. autoclass:: pydatastructs.DoublyCircularLinkedList 11 | 12 | .. autoclass:: pydatastructs.SkipList -------------------------------------------------------------------------------- /pydatastructs/utils/raises_util.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | def raises(exception, code): 4 | """ 5 | Utility for testing exceptions. 6 | 7 | Parameters 8 | ========== 9 | 10 | exception 11 | A valid python exception 12 | code: lambda 13 | Code that causes exception 14 | """ 15 | with pytest.raises(exception): 16 | code() 17 | return True 18 | -------------------------------------------------------------------------------- /scripts/build/add_dummy_submodules.py: -------------------------------------------------------------------------------- 1 | from dummy_submodules_data import (project, modules, backend, 2 | cpp, dummy_submodules_list) 3 | 4 | def add_dummy_submodules(): 5 | for module, dummy_submodules in zip(modules, dummy_submodules_list): 6 | for dummy_submodule in dummy_submodules: 7 | open('/'.join([project, module, backend, cpp, dummy_submodule]), 'w+').close() 8 | 9 | add_dummy_submodules() 10 | -------------------------------------------------------------------------------- /pydatastructs/trees/_extensions.py: -------------------------------------------------------------------------------- 1 | from setuptools import Extension 2 | 3 | project = 'pydatastructs' 4 | 5 | module = 'trees' 6 | 7 | backend = '_backend' 8 | 9 | cpp = 'cpp' 10 | 11 | trees = 
'.'.join([project, module, backend, cpp, '_trees']) 12 | trees_sources = ['/'.join([project, module, backend, cpp, 13 | 'trees.cpp'])] 14 | 15 | extensions = [ 16 | Extension(trees, sources=trees_sources) 17 | ] 18 | -------------------------------------------------------------------------------- /scripts/build/delete_dummy_submodules.py: -------------------------------------------------------------------------------- 1 | import os 2 | from dummy_submodules_data import (project, modules, backend, 3 | cpp, dummy_submodules_list) 4 | 5 | def delete_dummy_submodules(): 6 | for module, dummy_submodules in zip(modules, dummy_submodules_list): 7 | for dummy_submodule in dummy_submodules: 8 | os.remove('/'.join([project, module, backend, cpp, dummy_submodule])) 9 | 10 | delete_dummy_submodules() 11 | -------------------------------------------------------------------------------- /pydatastructs/trees/tests/meson.build: -------------------------------------------------------------------------------- 1 | python = import('python').find_installation(pure: false) 2 | 3 | python.install_sources( 4 | [ 5 | '__init__.py', 6 | 'benchmarks/test_binary_trees.py', 7 | 'test_binary_trees.py', 8 | 'test_heaps.py', 9 | 'test_m_ary_trees.py', 10 | 'test_space_partitioning_tree.py' 11 | ], 12 | subdir: 'pydatastructs/trees/tests', 13 | install_tag: 'tests' 14 | ) 15 | -------------------------------------------------------------------------------- /pydatastructs/miscellaneous_data_structures/_extensions.py: -------------------------------------------------------------------------------- 1 | from setuptools import Extension 2 | 3 | project = 'pydatastructs' 4 | 5 | module = 'miscellaneous_data_structures' 6 | 7 | backend = '_backend' 8 | 9 | cpp = 'cpp' 10 | 11 | stack = '.'.join([project, module, backend, cpp, '_stack']) 12 | stack_sources = ['/'.join([project, module, backend, cpp, 'stack', 'stack.cpp'])] 13 | 14 | extensions = [ 15 | Extension(stack, sources=stack_sources), 16 | ] 17 | 
-------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["meson-python"] 3 | build-backend = "mesonpy" 4 | 5 | [project] 6 | name = "pydatastructs" 7 | version = "1.0.1.dev0" 8 | description = "Data structures and algorithms implemented using Python and C++" 9 | readme = "README.md" 10 | requires-python = ">=3.9" 11 | 12 | [tool.spin] 13 | package = "pydatastructs" 14 | 15 | [tool.spin.commands] 16 | Build = ["spin.cmds.meson.build"] 17 | Test = ["spin.cmds.meson.test"] 18 | -------------------------------------------------------------------------------- /pydatastructs/linear_data_structures/tests/meson.build: -------------------------------------------------------------------------------- 1 | python = import('python').find_installation(pure: false) 2 | 3 | python.install_sources( 4 | [ 5 | '__init__.py', 6 | 'benchmarks/__init__.py', 7 | 'benchmarks/test_algorithms.py', 8 | 'benchmarks/test_arrays.py', 9 | 'test_algorithms.py', 10 | 'test_arrays.py', 11 | 'test_linked_lists.py' 12 | ], 13 | subdir: 'pydatastructs/linear_data_structures/tests', 14 | install_tag: 'tests' 15 | ) 16 | -------------------------------------------------------------------------------- /docs/source/pydatastructs/trees/binary_trees.rst: -------------------------------------------------------------------------------- 1 | Binary Trees 2 | ============ 3 | 4 | .. autoclass:: pydatastructs.BinaryTree 5 | 6 | .. autoclass:: pydatastructs.BinarySearchTree 7 | 8 | .. autoclass:: pydatastructs.AVLTree 9 | 10 | .. autoclass:: pydatastructs.BinaryIndexedTree 11 | 12 | .. autoclass:: pydatastructs.CartesianTree 13 | 14 | .. autoclass:: pydatastructs.Treap 15 | 16 | .. autoclass:: pydatastructs.SplayTree 17 | 18 | .. autoclass:: pydatastructs.RedBlackTree 19 | 20 | .. 
autoclass:: pydatastructs.BinaryTreeTraversal 21 | -------------------------------------------------------------------------------- /pydatastructs/miscellaneous_data_structures/tests/meson.build: -------------------------------------------------------------------------------- 1 | python = import('python').find_installation(pure: false) 2 | 3 | python.install_sources( 4 | [ 5 | '__init__.py', 6 | 'test_binomial_trees.py', 7 | 'test_disjoint_set.py', 8 | 'test_multiset.py', 9 | 'test_queue.py', 10 | 'test_range_query_dynamic.py', 11 | 'test_range_query_static.py', 12 | 'test_stack.py' 13 | ], 14 | subdir: 'pydatastructs/miscellaneous_data_structures/tests', 15 | install_tag: 'tests' 16 | ) 17 | -------------------------------------------------------------------------------- /PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 3 | 4 | #### References to other Issues or PRs or Relevant literature 5 | 10 | 11 | 12 | #### Brief description of what is fixed or changed 13 | 14 | 15 | #### Other comments 16 | -------------------------------------------------------------------------------- /pydatastructs/utils/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = [] 2 | 3 | from . 
import ( 4 | misc_util, 5 | testing_util, 6 | ) 7 | 8 | from .misc_util import ( 9 | TreeNode, 10 | MAryTreeNode, 11 | LinkedListNode, 12 | BinomialTreeNode, 13 | AdjacencyListGraphNode, 14 | AdjacencyMatrixGraphNode, 15 | GraphEdge, 16 | Set, 17 | CartesianTreeNode, 18 | RedBlackTreeNode, 19 | TrieNode, 20 | SkipNode, 21 | summation, 22 | greatest_common_divisor, 23 | minimum, 24 | Backend 25 | ) 26 | from .testing_util import test 27 | 28 | __all__.extend(misc_util.__all__) 29 | __all__.extend(testing_util.__all__) 30 | -------------------------------------------------------------------------------- /AUTHORS: -------------------------------------------------------------------------------- 1 | Gagandeep Singh 2 | Kartikei Mittal 3 | Umesh <23umesh.here@gmail.com> 4 | Rohan Singh 5 | Tarun Singh Tomar 6 | Saptashrungi Birajdar 7 | Rajiv Ranjan Singh 8 | Prashant Rawat 9 | Harsheet 10 | Pratik Goyal 11 | Jay Thorat 12 | Rajveer Singh Bharadwaj 13 | Kishan Ved 14 | Arvinder Singh Dhoul 15 | Harshita Kalani -------------------------------------------------------------------------------- /pydatastructs/trees/meson.build: -------------------------------------------------------------------------------- 1 | python = import('python').find_installation(pure: false) 2 | 3 | python.install_sources( 4 | [ 5 | '__init__.py', 6 | 'binary_trees.py', 7 | 'heaps.py', 8 | 'm_ary_trees.py', 9 | 'space_partitioning_trees.py' 10 | ], 11 | subdir: 'pydatastructs/trees' 12 | ) 13 | 14 | python.install_sources( 15 | ['_backend/__init__.py', '_backend/cpp/__init__.py'], 16 | subdir: 'pydatastructs/trees/_backend' 17 | ) 18 | 19 | python.extension_module( 20 | '_trees', 21 | '_backend/cpp/trees.cpp', 22 | install: true, 23 | subdir: 'pydatastructs/trees/_backend/cpp' 24 | ) 25 | 26 | subdir('tests') 27 | -------------------------------------------------------------------------------- /pydatastructs/graphs/__init__.py: 
-------------------------------------------------------------------------------- 1 | __all__ = [] 2 | 3 | from . import graph 4 | from .graph import ( 5 | Graph 6 | ) 7 | __all__.extend(graph.__all__) 8 | 9 | from . import algorithms 10 | from . import adjacency_list 11 | from . import adjacency_matrix 12 | 13 | from .algorithms import ( 14 | breadth_first_search, 15 | breadth_first_search_parallel, 16 | minimum_spanning_tree, 17 | minimum_spanning_tree_parallel, 18 | strongly_connected_components, 19 | depth_first_search, 20 | shortest_paths, 21 | all_pair_shortest_paths, 22 | topological_sort, 23 | topological_sort_parallel, 24 | max_flow, 25 | find_bridges 26 | ) 27 | 28 | __all__.extend(algorithms.__all__) 29 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/source/pydatastructs/graphs/algorithms.rst: -------------------------------------------------------------------------------- 1 | Algorithms 2 | ========== 3 | 4 | .. autofunction:: pydatastructs.breadth_first_search 5 | 6 | .. autofunction:: pydatastructs.breadth_first_search_parallel 7 | 8 | .. 
autofunction:: pydatastructs.minimum_spanning_tree 9 | 10 | .. autofunction:: pydatastructs.minimum_spanning_tree_parallel 11 | 12 | .. autofunction:: pydatastructs.strongly_connected_components 13 | 14 | .. autofunction:: pydatastructs.depth_first_search 15 | 16 | .. autofunction:: pydatastructs.shortest_paths 17 | 18 | .. autofunction:: pydatastructs.all_pair_shortest_paths 19 | 20 | .. autofunction:: pydatastructs.topological_sort 21 | 22 | .. autofunction:: pydatastructs.topological_sort_parallel 23 | 24 | .. autofunction:: pydatastructs.find_bridges -------------------------------------------------------------------------------- /pydatastructs/utils/meson.build: -------------------------------------------------------------------------------- 1 | python = import('python').find_installation(pure: false) 2 | 3 | python.install_sources( 4 | [ 5 | '__init__.py', 6 | 'misc_util.py', 7 | 'raises_util.py', 8 | 'testing_util.py' 9 | ], 10 | subdir: 'pydatastructs/utils' 11 | ) 12 | 13 | python.install_sources( 14 | ['_backend/__init__.py', '_backend/cpp/__init__.py'], 15 | subdir: 'pydatastructs/utils/_backend' 16 | ) 17 | 18 | cpp_args = [] 19 | if host_machine.system() == 'windows' 20 | cpp_args += ['/MD'] 21 | endif 22 | 23 | python.extension_module( 24 | '_nodes', 25 | [ 26 | '_backend/cpp/nodes.cpp', 27 | ], 28 | cpp_args: cpp_args, 29 | install: true, 30 | subdir: 'pydatastructs/utils/_backend/cpp' 31 | ) 32 | 33 | subdir('tests') -------------------------------------------------------------------------------- /pydatastructs/utils/_backend/cpp/nodes.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include "Node.hpp" 3 | #include "TreeNode.hpp" 4 | 5 | static struct PyModuleDef nodes_struct = { 6 | PyModuleDef_HEAD_INIT, 7 | "_nodes", 8 | 0, 9 | -1, 10 | NULL, 11 | }; 12 | 13 | PyMODINIT_FUNC PyInit__nodes(void) { 14 | PyObject *nodes = PyModule_Create(&nodes_struct); 15 | 16 | if (PyType_Ready(&NodeType) < 0) 
{ 17 | return NULL; 18 | } 19 | Py_INCREF(&NodeType); 20 | PyModule_AddObject(nodes, "Node", reinterpret_cast(&NodeType)); 21 | 22 | if (PyType_Ready(&TreeNodeType) < 0) { 23 | return NULL; 24 | } 25 | Py_INCREF(&TreeNodeType); 26 | PyModule_AddObject(nodes, "TreeNode", reinterpret_cast(&TreeNodeType)); 27 | 28 | return nodes; 29 | } 30 | -------------------------------------------------------------------------------- /pydatastructs/miscellaneous_data_structures/tests/test_binomial_trees.py: -------------------------------------------------------------------------------- 1 | from pydatastructs.miscellaneous_data_structures.binomial_trees import BinomialTree 2 | from pydatastructs.utils.raises_util import raises 3 | from pydatastructs.utils.misc_util import BinomialTreeNode 4 | 5 | # only tests the corner cases 6 | def test_BinomialTree(): 7 | assert raises(TypeError, lambda: BinomialTree(1, 1)) 8 | assert raises(TypeError, lambda: BinomialTree(None, 1.5)) 9 | 10 | bt = BinomialTree() 11 | assert raises(TypeError, lambda: bt.add_sub_tree(None)) 12 | bt1 = BinomialTree(BinomialTreeNode(1, 1), 0) 13 | node = BinomialTreeNode(2, 2) 14 | node.add_children(BinomialTreeNode(3, 3)) 15 | bt2 = BinomialTree(node, 1) 16 | assert raises(ValueError, lambda: bt1.add_sub_tree(bt2)) 17 | assert bt1.is_empty is False 18 | -------------------------------------------------------------------------------- /pydatastructs/linear_data_structures/_extensions.py: -------------------------------------------------------------------------------- 1 | from setuptools import Extension 2 | 3 | project = 'pydatastructs' 4 | 5 | module = 'linear_data_structures' 6 | 7 | backend = '_backend' 8 | 9 | cpp = 'cpp' 10 | 11 | arrays = '.'.join([project, module, backend, cpp, '_arrays']) 12 | arrays_sources = ['/'.join([project, module, backend, cpp, 13 | 'arrays', 'arrays.cpp'])] 14 | 15 | algorithms = '.'.join([project, module, backend, cpp, '_algorithms']) 16 | algorithms_sources = ['/'.join([project, 
module, backend, cpp, 17 | 'algorithms', 'algorithms.cpp'])] 18 | 19 | extensions = [ 20 | Extension(arrays, sources=arrays_sources, language="c++", extra_compile_args=["-std=c++17"]), 21 | Extension(algorithms, sources=algorithms_sources, language="c++", extra_compile_args=["-std=c++17"]) 22 | ] 23 | -------------------------------------------------------------------------------- /pydatastructs/trees/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = [] 2 | 3 | from . import ( 4 | binary_trees, 5 | m_ary_trees, 6 | space_partitioning_trees, 7 | heaps, 8 | ) 9 | 10 | from .binary_trees import ( 11 | BinaryTree, 12 | BinarySearchTree, 13 | BinaryTreeTraversal, 14 | AVLTree, 15 | BinaryIndexedTree, 16 | CartesianTree, 17 | Treap, 18 | SplayTree, 19 | RedBlackTree 20 | ) 21 | __all__.extend(binary_trees.__all__) 22 | 23 | from .m_ary_trees import ( 24 | MAryTreeNode, MAryTree 25 | ) 26 | 27 | __all__.extend(m_ary_trees.__all__) 28 | 29 | from .space_partitioning_trees import ( 30 | OneDimensionalSegmentTree 31 | ) 32 | __all__.extend(space_partitioning_trees.__all__) 33 | 34 | from .heaps import ( 35 | BinaryHeap, 36 | TernaryHeap, 37 | DHeap, 38 | BinomialHeap 39 | ) 40 | __all__.extend(heaps.__all__) 41 | -------------------------------------------------------------------------------- /pydatastructs/miscellaneous_data_structures/meson.build: -------------------------------------------------------------------------------- 1 | python = import('python').find_installation(pure: false) 2 | 3 | python.install_sources( 4 | [ 5 | '__init__.py', 6 | 'algorithms.py', 7 | 'multiset.py', 8 | 'sparse_table.py', 9 | 'disjoint_set.py', 10 | 'queue.py', 11 | 'binomial_trees.py', 12 | 'segment_tree.py', 13 | 'stack.py' 14 | ], 15 | subdir: 'pydatastructs/miscellaneous_data_structures' 16 | ) 17 | 18 | python.install_sources( 19 | ['_backend/__init__.py', '_backend/cpp/__init__.py'], 20 | subdir: 
'pydatastructs/miscellaneous_data_structures/_backend' 21 | ) 22 | 23 | python.extension_module( 24 | '_stack', 25 | '_backend/cpp/stack/stack.cpp', 26 | install: true, 27 | subdir: 'pydatastructs/miscellaneous_data_structures/_backend/cpp' 28 | ) 29 | 30 | subdir('tests') 31 | -------------------------------------------------------------------------------- /docs/source/authors.rst: -------------------------------------------------------------------------------- 1 | Authors 2 | ======= 3 | 4 | The following contributors wanted themselves to be added as 5 | authors of the project. If you too have contributed to PyDataStructs 6 | and want to be considered as author then feel free to open a PR editing 7 | ``docs/source/authors.rst`` and ``AUTHORS`` files. 8 | 9 | Gagandeep Singh 10 | 11 | Kartikei Mittal 12 | 13 | Umesh <23umesh.here@gmail.com> 14 | 15 | Rohan Singh 16 | 17 | Tarun Singh Tomar 18 | 19 | Saptashrungi Birajdar 20 | 21 | Rajiv Ranjan Singh 22 | 23 | Prashant Rawat 24 | 25 | Harsheet 26 | 27 | Pratik Goyal 28 | 29 | Jay Thorat 30 | 31 | Kishan Ved 32 | -------------------------------------------------------------------------------- /pydatastructs/graphs/meson.build: -------------------------------------------------------------------------------- 1 | python = import('python').find_installation(pure: false) 2 | 3 | python.install_sources( 4 | [ 5 | '__init__.py', 6 | 'adjacency_list.py', 7 | 'adjacency_matrix.py', 8 | 'algorithms.py', 9 | 'graph.py' 10 | ], 11 | subdir: 'pydatastructs/graphs' 12 | ) 13 | 14 | python.install_sources( 15 | ['_backend/__init__.py', '_backend/cpp/__init__.py'], 16 | subdir: 'pydatastructs/graphs/_backend' 17 | ) 18 | 19 | py_include = include_directories('../utils/_backend/cpp') 20 | 21 | python.extension_module( 22 | '_graph', 23 | [ 24 | '_backend/cpp/graph.cpp', 25 | '_backend/cpp/algorithms.cpp', 26 | '../utils/_backend/cpp/graph_utils.cpp', 27 | ], 28 | include_directories: py_include, 29 | install: true, 30 | subdir: 
def test_Multiset():
    """Exercise Multiset: incremental adds, counting, membership,
    ordered lower/upper bounds and removal of duplicates."""
    # Build the multiset one element at a time.
    multiset = Multiset()
    for value in (5, 5, 3, 7):
        multiset.add(value)
    assert len(multiset) == 4
    assert 5 in multiset
    assert multiset.count(5) == 2
    assert multiset.count(3) == 1
    assert multiset.count(-3) == 0
    assert 4 not in multiset
    # Removing one of two copies keeps the element present.
    multiset.remove(5)
    assert 5 in multiset
    assert multiset.lower_bound(5) == 5
    assert multiset.upper_bound(5) == 7

    # Build directly from constructor arguments.
    multiset = Multiset(5, 3, 7, 2)

    assert len(multiset) == 4
    assert 5 in multiset
    assert multiset.count(7) == 1
    assert 4 not in multiset
    assert multiset.lower_bound(3) == 3
    assert multiset.upper_bound(3) == 5
    # No element greater than the maximum.
    assert multiset.upper_bound(7) is None

    # Removing the only copy of 5 drops it entirely.
    multiset.remove(5)

    assert len(multiset) == 3
    assert 5 not in multiset

    multiset.add(4)

    assert 4 in multiset
    assert len(multiset) == 4
__all__ = []

# Import every submodule explicitly. Previously segment_tree, algorithms
# and multiset were missing from this list and only became accessible
# through the implicit submodule binding done by their `from .x import y`
# statements below; listing them here matches the file's own convention
# and makes the dependency explicit.
from . import (
    stack,
    binomial_trees,
    queue,
    disjoint_set,
    sparse_table,
    segment_tree,
    algorithms,
    multiset,
)

from .binomial_trees import (
    BinomialTree
)
__all__.extend(binomial_trees.__all__)

from .stack import (
    Stack,
)
__all__.extend(stack.__all__)

from .queue import (
    Queue,
    PriorityQueue
)
__all__.extend(queue.__all__)

from .disjoint_set import (
    DisjointSetForest,
)
__all__.extend(disjoint_set.__all__)

from .sparse_table import (
    SparseTable,
)
__all__.extend(sparse_table.__all__)

from .segment_tree import (
    ArraySegmentTree,
)
__all__.extend(segment_tree.__all__)

from .algorithms import (
    RangeQueryStatic,
    RangeQueryDynamic
)
__all__.extend(algorithms.__all__)

from .multiset import (
    Multiset
)
__all__.extend(multiset.__all__)
def test_OneDimensionalSegmentTree():
    """Smoke-test OneDimensionalSegmentTree: construction, exact string
    representation, stabbing queries, query caching and input validation."""
    ODST = OneDimensionalSegmentTree
    segt = ODST([(0, 5), (1, 6), (9, 13), (1, 2), (3, 8), (9, 20)])
    # The cache flag stays unset until the first query runs.
    assert segt.cache is False
    segt2 = ODST([(1, 4)])
    # Exact structural repr of a single-interval tree.
    assert str(segt2) == ("[(None, [False, 0, 1, False], None, None), "
    "(None, [True, 1, 1, True], ['(None, [True, 1, 4, True], None, None)'], "
    "None), (None, [False, 1, 4, False], None, None), (None, [True, 4, 4, True], "
    "None, None), (0, [False, 0, 1, True], None, 1), (2, [False, 1, 4, True], "
    "['(None, [True, 1, 4, True], None, None)'], 3), (4, [False, 0, 4, True], "
    "None, 5), (None, [False, 0, 4, True], "
    "None, 5), (None, [False, 4, 5, False], None, None), (-3, [False, 0, 5, "
    "False], None, -2)]")
    # 1.5 lies inside (0, 5), (1, 6) and (1, 2) -> three hits.
    assert len(segt.query(1.5)) == 3
    # A query populates the cache.
    assert segt.cache is True
    # -1 is outside every interval.
    assert len(segt.query(-1)) == 0
    # 2.8 lies inside (0, 5) and (1, 6).
    assert len(segt.query(2.8)) == 2
    # Intervals must be 2-tuples; a 3-tuple is rejected.
    assert raises(ValueError, lambda: ODST([(1, 2, 3)]))
--force-reinstall") 30 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Compiled source # 2 | ################### 3 | *.com 4 | *.class 5 | *.dll 6 | *.exe 7 | *.o 8 | *.so 9 | *.gch 10 | 11 | # Packages # 12 | ############ 13 | # it's better to unpack these files and commit the raw source 14 | # git has its own built in compression methods 15 | *.7z 16 | *.dmg 17 | *.gz 18 | *.iso 19 | *.jar 20 | *.rar 21 | *.tar 22 | *.zip 23 | 24 | # Logs and databases # 25 | ###################### 26 | *.log 27 | *.sql 28 | *.sqlite 29 | 30 | # OS generated files # 31 | ###################### 32 | .DS_Store 33 | .DS_Store? 34 | ._* 35 | .Spotlight-V100 36 | .Trashes 37 | ehthumbs.db 38 | Thumbs.db 39 | pre_commit.ps1 40 | htmlcov 41 | *coverage* 42 | *.egg-info/ 43 | 44 | # Python Files # 45 | ################ 46 | *.pyc 47 | *.pyo 48 | *~ 49 | __pycache__/ 50 | .pytest_cache/ 51 | 52 | # Backup Files # 53 | ################ 54 | *.bak 55 | *.swp 56 | 57 | # Editor Files # 58 | ################ 59 | .vscode/ 60 | .vs/ 61 | 62 | # Other Files # 63 | ################ 64 | .idea/ 65 | build/ 66 | !scripts/build 67 | dist/ 68 | venv/ 69 | *.gem 70 | .bundle 71 | .sass-cache 72 | _site 73 | Gemfile.lock 74 | node_modules 75 | -------------------------------------------------------------------------------- /pydatastructs/linear_data_structures/meson.build: -------------------------------------------------------------------------------- 1 | python = import('python').find_installation(pure: false) 2 | 3 | python.install_sources( 4 | [ 5 | '__init__.py', 6 | 'algorithms.py', 7 | 'arrays.py', 8 | 'linked_lists.py' 9 | ], 10 | subdir: 'pydatastructs/linear_data_structures' 11 | ) 12 | 13 | python.install_sources( 14 | ['_backend/__init__.py', '_backend/cpp/__init__.py'], 15 | subdir: 'pydatastructs/linear_data_structures/_backend' 16 | ) 17 | 18 | 
python.install_sources( 19 | ['_backend/cpp/algorithms/llvm_algorithms.py'], 20 | subdir: 'pydatastructs/linear_data_structures/_backend/cpp/algorithms' 21 | ) 22 | 23 | python.extension_module( 24 | '_arrays', 25 | '_backend/cpp/arrays/arrays.cpp', 26 | install: true, 27 | subdir: 'pydatastructs/linear_data_structures/_backend/cpp' 28 | ) 29 | 30 | python.extension_module( 31 | '_algorithms', 32 | '_backend/cpp/algorithms/algorithms.cpp', 33 | install: true, 34 | subdir: 'pydatastructs/linear_data_structures/_backend/cpp', 35 | cpp_args: ['-std=c++17'], 36 | override_options: ['cpp_std=c++17'], 37 | ) 38 | 39 | subdir('tests') 40 | -------------------------------------------------------------------------------- /pydatastructs/graphs/_backend/cpp/algorithms.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include "Algorithms.hpp" 3 | #include "AdjacencyList.hpp" 4 | #include "AdjacencyMatrix.hpp" 5 | 6 | static PyMethodDef AlgorithmsMethods[] = { 7 | {"bfs_adjacency_list", (PyCFunction)breadth_first_search_adjacency_list, METH_VARARGS | METH_KEYWORDS, "Run BFS on adjacency list with callback"}, 8 | {"bfs_adjacency_matrix", (PyCFunction)breadth_first_search_adjacency_matrix, METH_VARARGS | METH_KEYWORDS, "Run BFS on adjacency matrix with callback"}, 9 | {"minimum_spanning_tree_prim_adjacency_list", (PyCFunction)minimum_spanning_tree_prim_adjacency_list, METH_VARARGS | METH_KEYWORDS, "Run Prim's algorithm on adjacency list"}, 10 | {"shortest_paths_dijkstra_adjacency_list", (PyCFunction)shortest_paths_dijkstra_adjacency_list, METH_VARARGS | METH_KEYWORDS, "Dijkstra's algorithm for adjacency list graphs"}, 11 | {NULL, NULL, 0, NULL} 12 | }; 13 | 14 | static struct PyModuleDef algorithms_module = { 15 | PyModuleDef_HEAD_INIT, 16 | "_algorithms", NULL, -1, AlgorithmsMethods 17 | }; 18 | 19 | PyMODINIT_FUNC PyInit__algorithms(void) { 20 | return PyModule_Create(&algorithms_module); 21 | } 22 | 
class Multiset:
    """A multiset (bag): stores elements with multiplicities and supports
    ordered queries (``lower_bound``/``upper_bound``) via a red-black tree.

    The tree holds one node per *distinct* element; ``counter`` maps each
    element to its multiplicity and ``_n`` tracks the total size.
    """

    def __init__(self, *args):
        """Create a multiset, adding every positional argument to it."""
        # TODO: Implement dict in pydatastructs
        self.counter = dict()
        from pydatastructs.trees import RedBlackTree
        self.tree = RedBlackTree()
        self._n = 0
        for arg in args:
            self.add(arg)

    def add(self, element):
        """Insert one occurrence of ``element``."""
        self.counter[element] = self.counter.get(element, 0) + 1
        self._n += 1
        if self.counter[element] == 1:
            # First occurrence of a distinct element: mirror it in the
            # ordered tree used for lower_bound/upper_bound.
            self.tree.insert(element)

    def remove(self, element):
        """Remove one occurrence of ``element``.

        Raises
        ======
        KeyError
            If ``element`` is not present in the multiset.
        """
        if self.counter.get(element, 0) <= 0:
            # Bug fix: previously a stale zero-count entry was decremented
            # below zero, silently corrupting later add()/count() calls
            # (absent elements already raised KeyError via dict lookup).
            raise KeyError(element)
        if self.counter[element] == 1:
            # Last occurrence: drop the element from the ordered tree too.
            self.tree.delete(element)
        self._n -= 1
        self.counter[element] -= 1

    def lower_bound(self, element):
        """Return the smallest stored element >= ``element``."""
        return self.tree.lower_bound(element)

    def upper_bound(self, element):
        """Return the smallest stored element > ``element``."""
        return self.tree.upper_bound(element)

    def __contains__(self, element):
        """Return True if at least one occurrence is present."""
        return self.counter.get(element, 0) > 0

    def __len__(self):
        """Return the total number of occurrences (with multiplicity)."""
        return self._n

    def count(self, element):
        """Return the multiplicity of ``element`` (0 if absent)."""
        return self.counter.get(element, 0)
38 | sources=nodes_sources, 39 | extra_compile_args=["-std=c++17"] 40 | ), 41 | Extension( 42 | graph_utils, 43 | sources=graph_utils_sources, 44 | extra_compile_args=["-std=c++17"] 45 | ) 46 | ] 47 | -------------------------------------------------------------------------------- /docs/source/pydatastructs/linear_data_structures/algorithms.rst: -------------------------------------------------------------------------------- 1 | Algorithms 2 | ========== 3 | 4 | .. autofunction:: pydatastructs.merge_sort_parallel 5 | 6 | .. autofunction:: pydatastructs.brick_sort 7 | 8 | .. autofunction:: pydatastructs.brick_sort_parallel 9 | 10 | .. autofunction:: pydatastructs.heapsort 11 | 12 | .. autofunction:: pydatastructs.matrix_multiply_parallel 13 | 14 | .. autofunction:: pydatastructs.counting_sort 15 | 16 | .. autofunction:: pydatastructs.bucket_sort 17 | 18 | .. autofunction:: pydatastructs.cocktail_shaker_sort 19 | 20 | .. autofunction:: pydatastructs.quick_sort 21 | 22 | .. autofunction:: pydatastructs.bubble_sort 23 | 24 | .. autofunction:: pydatastructs.selection_sort 25 | 26 | .. autofunction:: pydatastructs.insertion_sort 27 | 28 | .. autofunction:: pydatastructs.longest_common_subsequence 29 | 30 | .. autofunction:: pydatastructs.is_ordered 31 | 32 | .. autofunction:: pydatastructs.upper_bound 33 | 34 | .. autofunction:: pydatastructs.lower_bound 35 | 36 | .. autofunction:: pydatastructs.longest_increasing_subsequence 37 | 38 | .. autofunction:: pydatastructs.next_permutation 39 | 40 | .. autofunction:: pydatastructs.prev_permutation 41 | 42 | .. autofunction:: pydatastructs.linear_search 43 | 44 | .. autofunction:: pydatastructs.binary_search 45 | 46 | .. autofunction:: pydatastructs.jump_search 47 | 48 | .. autofunction:: pydatastructs.intro_sort 49 | 50 | .. autofunction:: pydatastructs.shell_sort 51 | 52 | .. 
# Packaging entry point: aggregates the C++ extension modules declared by
# each subpackage's _extensions.py and hands them to setuptools.
import setuptools
from pydatastructs import utils
from pydatastructs import linear_data_structures
from pydatastructs import miscellaneous_data_structures
from pydatastructs import trees
from pydatastructs import graphs

# Long description for PyPI comes straight from the README.
with open("README.md", "r") as fh:
    long_description = fh.read()

extensions = []

# Each subpackage exposes its setuptools.Extension list via _extensions.
extensions.extend(utils._extensions.extensions)
extensions.extend(linear_data_structures._extensions.extensions)
extensions.extend(miscellaneous_data_structures._extensions.extensions)
extensions.extend(trees._extensions.extensions)
extensions.extend(graphs._extensions.extensions)

setuptools.setup(
    name="cz-pydatastructs",
    # NOTE(review): version "1.0.1-dev" here vs "1.0.1.dev0" in
    # pyproject.toml — confirm which is canonical.
    version="1.0.1-dev",
    author="PyDataStructs Development Team",
    author_email="pydatastructs@googlegroups.com",
    description="A python package for data structures",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/codezonediitj/pydatastructs",
    packages=setuptools.find_packages(),
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: BSD License",
        "Operating System :: OS Independent",
        "Topic :: Scientific/Engineering",
        "Topic :: Scientific/Engineering :: Information Analysis",
        "Topic :: Software Development :: Libraries"
    ],
    python_requires='>=3.9',
    ext_modules=extensions
)
LINEAR_DATA_STRUCTURES_DYNAMIC_ARRAY_HPP 3 | 4 | #define PY_SSIZE_T_CLEAN 5 | #include 6 | #include 7 | #include "Array.hpp" 8 | 9 | typedef struct { 10 | PyObject_HEAD 11 | } DynamicArray; 12 | 13 | static PyTypeObject DynamicArrayType = { 14 | /* tp_name */ PyVarObject_HEAD_INIT(NULL, 0) "DynamicArray", 15 | /* tp_basicsize */ sizeof(DynamicArray), 16 | /* tp_itemsize */ 0, 17 | /* tp_dealloc */ 0, 18 | /* tp_print */ 0, 19 | /* tp_getattr */ 0, 20 | /* tp_setattr */ 0, 21 | /* tp_reserved */ 0, 22 | /* tp_repr */ 0, 23 | /* tp_as_number */ 0, 24 | /* tp_as_sequence */ 0, 25 | /* tp_as_mapping */ 0, 26 | /* tp_hash */ 0, 27 | /* tp_call */ 0, 28 | /* tp_str */ 0, 29 | /* tp_getattro */ 0, 30 | /* tp_setattro */ 0, 31 | /* tp_as_buffer */ 0, 32 | /* tp_flags */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, 33 | /* tp_doc */ 0, 34 | /* tp_traverse */ 0, 35 | /* tp_clear */ 0, 36 | /* tp_richcompare */ 0, 37 | /* tp_weaklistoffset */ 0, 38 | /* tp_iter */ 0, 39 | /* tp_iternext */ 0, 40 | /* tp_methods */ 0, 41 | /* tp_members */ 0, 42 | /* tp_getset */ 0, 43 | /* tp_base */ &ArrayType, 44 | /* tp_dict */ 0, 45 | /* tp_descr_get */ 0, 46 | /* tp_descr_set */ 0, 47 | /* tp_dictoffset */ 0, 48 | /* tp_init */ 0, 49 | /* tp_alloc */ 0, 50 | /* tp_new */ 0, 51 | }; 52 | 53 | #endif 54 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2019-2021 PyDataStructs Development Team 2 | 3 | All rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | a. Redistributions of source code must retain the above copyright notice, 9 | this list of conditions and the following disclaimer. 10 | b. 
Redistributions in binary form must reproduce the above copyright 11 | notice, this list of conditions and the following disclaimer in the 12 | documentation and/or other materials provided with the distribution. 13 | c. Neither the name of PyDataStructs nor the names of its contributors 14 | may be used to endorse or promote products derived from this software 15 | without specific prior written permission. 16 | 17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 19 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 20 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 21 | ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR 22 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 23 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 24 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 25 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 26 | LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY 27 | OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH 28 | DAMAGE. 
29 | -------------------------------------------------------------------------------- /pydatastructs/miscellaneous_data_structures/_backend/cpp/stack/stack.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include "ArrayStack.hpp" 3 | 4 | static struct PyModuleDef stack_struct = { 5 | PyModuleDef_HEAD_INIT, 6 | "_stack", 7 | 0, 8 | -1, 9 | NULL, 10 | }; 11 | 12 | PyMODINIT_FUNC PyInit__stack(void) { 13 | Py_Initialize(); 14 | PyObject *stack = PyModule_Create(&stack_struct); 15 | 16 | if (PyType_Ready(&ArrayStackType) < 0) { 17 | return NULL; 18 | } 19 | Py_INCREF(&ArrayStackType); 20 | PyModule_AddObject(stack, "ArrayStack", reinterpret_cast(&ArrayStackType)); 21 | 22 | if (PyType_Ready(&ArrayType) < 0) { 23 | return NULL; 24 | } 25 | Py_INCREF(&ArrayType); 26 | PyModule_AddObject(stack, "Array", reinterpret_cast(&ArrayType)); 27 | 28 | if (PyType_Ready(&OneDimensionalArrayType) < 0) { 29 | return NULL; 30 | } 31 | Py_INCREF(&OneDimensionalArrayType); 32 | PyModule_AddObject(stack, "OneDimensionalArray", reinterpret_cast(&OneDimensionalArrayType)); 33 | 34 | if (PyType_Ready(&DynamicArrayType) < 0) { 35 | return NULL; 36 | } 37 | Py_INCREF(&DynamicArrayType); 38 | PyModule_AddObject(stack, "DynamicArray", reinterpret_cast(&DynamicArrayType)); 39 | 40 | if (PyType_Ready(&DynamicOneDimensionalArrayType) < 0) { 41 | return NULL; 42 | } 43 | Py_INCREF(&DynamicOneDimensionalArrayType); 44 | PyModule_AddObject(stack, "DynamicOneDimensionalArray", reinterpret_cast(&DynamicOneDimensionalArrayType)); 45 | 46 | 47 | return stack; 48 | } 49 | -------------------------------------------------------------------------------- /pydatastructs/linear_data_structures/_backend/cpp/algorithms/algorithms.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include "quick_sort.hpp" 3 | #include "quadratic_time_sort.hpp" 4 | #include "misc_algorithms.hpp" 5 | 6 | static PyMethodDef 
algorithms_PyMethodDef[] = { 7 | {"quick_sort", (PyCFunction) quick_sort, 8 | METH_VARARGS | METH_KEYWORDS, ""}, 9 | {"bubble_sort", (PyCFunction) bubble_sort, 10 | METH_VARARGS | METH_KEYWORDS, ""}, 11 | {"bubble_sort_llvm", (PyCFunction)bubble_sort_llvm, 12 | METH_VARARGS | METH_KEYWORDS, ""}, 13 | {"selection_sort_llvm", (PyCFunction)selection_sort_llvm, 14 | METH_VARARGS | METH_KEYWORDS, ""}, 15 | {"selection_sort", (PyCFunction) selection_sort, 16 | METH_VARARGS | METH_KEYWORDS, ""}, 17 | {"insertion_sort", (PyCFunction) insertion_sort, 18 | METH_VARARGS | METH_KEYWORDS, ""}, 19 | {"is_ordered", (PyCFunction) is_ordered, 20 | METH_VARARGS | METH_KEYWORDS, ""}, 21 | {"linear_search", (PyCFunction) linear_search, 22 | METH_VARARGS | METH_KEYWORDS, ""}, 23 | {"binary_search", (PyCFunction) binary_search, 24 | METH_VARARGS | METH_KEYWORDS, ""}, 25 | {"jump_search", (PyCFunction) jump_search, 26 | METH_VARARGS | METH_KEYWORDS, ""}, 27 | {NULL, NULL, 0, NULL} /* Sentinel */ 28 | }; 29 | 30 | static struct PyModuleDef algorithms_struct = { 31 | PyModuleDef_HEAD_INIT, 32 | "_algorithms", 33 | 0, 34 | -1, 35 | algorithms_PyMethodDef 36 | }; 37 | 38 | PyMODINIT_FUNC PyInit__algorithms(void) { 39 | Py_Initialize(); 40 | PyObject *algorithms = PyModule_Create(&algorithms_struct); 41 | return algorithms; 42 | } 43 | -------------------------------------------------------------------------------- /pydatastructs/strings/tests/test_trie.py: -------------------------------------------------------------------------------- 1 | from pydatastructs import Trie 2 | 3 | def test_Trie(): 4 | 5 | strings = ["A", "to", "tea", "ted", "ten", "i", 6 | "in", "inn", "Amfn", "snbr"] 7 | trie = Trie() 8 | for string in strings: 9 | trie.insert(string) 10 | 11 | prefix_strings = ["te", "t", "Am", "snb"] 12 | 13 | for string in strings: 14 | assert trie.is_inserted(string) 15 | 16 | for string in strings[::-1]: 17 | assert trie.is_inserted(string) 18 | 19 | for string in prefix_strings: 20 | 
assert trie.is_present(string) 21 | assert not trie.is_inserted(string) 22 | 23 | assert sorted(trie.strings_with_prefix("t")) == ['tea', 'ted', 'ten', 'to'] 24 | assert sorted(trie.strings_with_prefix("te")) == ["tea", "ted", "ten"] 25 | assert trie.strings_with_prefix("i") == ["i", "in", "inn"] 26 | assert trie.strings_with_prefix("a") == [] 27 | 28 | remove_order = ["to", "tea", "ted", "ten", "inn", "in", "A"] 29 | 30 | assert trie.delete("z") is None 31 | 32 | for string in remove_order: 33 | trie.delete(string) 34 | for present in strings: 35 | if present == string: 36 | assert not trie.is_inserted(present) 37 | else: 38 | assert trie.is_present(present) 39 | assert trie.is_inserted(present) 40 | strings.remove(string) 41 | 42 | prefix_strings_1 = ["dict", "dicts", "dicts_lists_tuples"] 43 | trie_1 = Trie() 44 | 45 | for i in range(len(prefix_strings_1)): 46 | trie_1.insert(prefix_strings_1[i]) 47 | for j in range(i + 1): 48 | assert trie_1.is_inserted(prefix_strings_1[j]) 49 | assert trie_1.is_present(prefix_strings_1[j]) 50 | -------------------------------------------------------------------------------- /pydatastructs/linear_data_structures/_backend/cpp/arrays/arrays.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include "Array.hpp" 3 | #include "OneDimensionalArray.hpp" 4 | #include "DynamicArray.hpp" 5 | #include "DynamicOneDimensionalArray.hpp" 6 | #include "ArrayForTrees.hpp" 7 | 8 | static struct PyModuleDef arrays_struct = { 9 | PyModuleDef_HEAD_INIT, 10 | "_arrays", 11 | 0, 12 | -1, 13 | NULL, 14 | }; 15 | 16 | PyMODINIT_FUNC PyInit__arrays(void) { 17 | Py_Initialize(); 18 | PyObject *arrays = PyModule_Create(&arrays_struct); 19 | 20 | if (PyType_Ready(&ArrayType) < 0) { 21 | return NULL; 22 | } 23 | Py_INCREF(&ArrayType); 24 | PyModule_AddObject(arrays, "Array", reinterpret_cast(&ArrayType)); 25 | 26 | if (PyType_Ready(&OneDimensionalArrayType) < 0) { 27 | return NULL; 28 | } 29 | 
Py_INCREF(&OneDimensionalArrayType); 30 | PyModule_AddObject(arrays, "OneDimensionalArray", reinterpret_cast(&OneDimensionalArrayType)); 31 | 32 | if (PyType_Ready(&DynamicArrayType) < 0) { 33 | return NULL; 34 | } 35 | Py_INCREF(&DynamicArrayType); 36 | PyModule_AddObject(arrays, "DynamicArray", reinterpret_cast(&DynamicArrayType)); 37 | 38 | if (PyType_Ready(&DynamicOneDimensionalArrayType) < 0) { 39 | return NULL; 40 | } 41 | Py_INCREF(&DynamicOneDimensionalArrayType); 42 | PyModule_AddObject(arrays, "DynamicOneDimensionalArray", reinterpret_cast(&DynamicOneDimensionalArrayType)); 43 | 44 | if (PyType_Ready(&ArrayForTreesType) < 0) { 45 | return NULL; 46 | } 47 | Py_INCREF(&ArrayForTreesType); 48 | PyModule_AddObject(arrays, "ArrayForTrees", reinterpret_cast(&ArrayForTreesType)); 49 | 50 | return arrays; 51 | } 52 | -------------------------------------------------------------------------------- /pydatastructs/utils/_backend/cpp/Node.hpp: -------------------------------------------------------------------------------- 1 | #ifndef UTILS_NODE_HPP 2 | #define UTILS_NODE_HPP 3 | 4 | #define PY_SSIZE_T_CLEAN 5 | #include 6 | #include 7 | #include "utils.hpp" 8 | 9 | typedef struct { 10 | PyObject_HEAD 11 | NodeType_ type_tag; 12 | } Node; 13 | // Node is an abstract class representing a Node 14 | 15 | static void Node_dealloc(Node *self) { 16 | Py_TYPE(self)->tp_free(reinterpret_cast(self)); 17 | } 18 | 19 | static struct PyMemberDef Node_PyMemberDef[] = { 20 | {"type_tag", T_INT, offsetof(Node, type_tag), 0, "Node type_tag"}, 21 | {NULL}, 22 | }; 23 | 24 | 25 | static PyTypeObject NodeType = { 26 | /* tp_name */ PyVarObject_HEAD_INIT(NULL, 0) "Node", 27 | /* tp_basicsize */ sizeof(Node), 28 | /* tp_itemsize */ 0, 29 | /* tp_dealloc */ (destructor) Node_dealloc, 30 | /* tp_print */ 0, 31 | /* tp_getattr */ 0, 32 | /* tp_setattr */ 0, 33 | /* tp_reserved */ 0, 34 | /* tp_repr */ 0, 35 | /* tp_as_number */ 0, 36 | /* tp_as_sequence */ 0, 37 | /* tp_as_mapping */ 0, 38 
| /* tp_hash */ 0, 39 | /* tp_call */ 0, 40 | /* tp_str */ 0, 41 | /* tp_getattro */ 0, 42 | /* tp_setattro */ 0, 43 | /* tp_as_buffer */ 0, 44 | /* tp_flags */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, 45 | /* tp_doc */ 0, 46 | /* tp_traverse */ 0, 47 | /* tp_clear */ 0, 48 | /* tp_richcompare */ 0, 49 | /* tp_weaklistoffset */ 0, 50 | /* tp_iter */ 0, 51 | /* tp_iternext */ 0, 52 | /* tp_methods */ 0, 53 | /* tp_members */ Node_PyMemberDef, 54 | /* tp_getset */ 0, 55 | /* tp_base */ &PyBaseObject_Type, 56 | /* tp_dict */ 0, 57 | /* tp_descr_get */ 0, 58 | /* tp_descr_set */ 0, 59 | /* tp_dictoffset */ 0, 60 | /* tp_init */ 0, 61 | /* tp_alloc */ 0, 62 | /* tp_new */ 0, 63 | }; 64 | 65 | #endif 66 | -------------------------------------------------------------------------------- /pydatastructs/linear_data_structures/_backend/cpp/arrays/Array.hpp: -------------------------------------------------------------------------------- 1 | #ifndef LINEAR_DATA_STRUCTURES_ARRAY_HPP 2 | #define LINEAR_DATA_STRUCTURES_ARRAY_HPP 3 | 4 | #define PY_SSIZE_T_CLEAN 5 | #include 6 | #include 7 | 8 | typedef struct { 9 | PyObject_HEAD 10 | } Array; 11 | 12 | static void Array_dealloc(Array *self) { 13 | Py_TYPE(self)->tp_free(reinterpret_cast(self)); 14 | } 15 | 16 | static PyObject* Array___new__(PyTypeObject* type, PyObject *args, 17 | PyObject *kwds) { 18 | Array *self; 19 | self = reinterpret_cast(type->tp_alloc(type, 0)); 20 | return reinterpret_cast(self); 21 | } 22 | 23 | static PyObject* Array___str__(Array *self) { 24 | PyObject* self__data = PyObject_GetAttrString(reinterpret_cast(self), "_data"); 25 | if ( !self__data ) { 26 | return NULL; 27 | } 28 | return PyObject_Str(self__data); 29 | } 30 | 31 | static PyTypeObject ArrayType = { 32 | /* tp_name */ PyVarObject_HEAD_INIT(NULL, 0) "Array", 33 | /* tp_basicsize */ sizeof(Array), 34 | /* tp_itemsize */ 0, 35 | /* tp_dealloc */ (destructor) Array_dealloc, 36 | /* tp_print */ 0, 37 | /* tp_getattr */ 0, 38 | /* tp_setattr */ 
0, 39 | /* tp_reserved */ 0, 40 | /* tp_repr */ 0, 41 | /* tp_as_number */ 0, 42 | /* tp_as_sequence */ 0, 43 | /* tp_as_mapping */ 0, 44 | /* tp_hash */ 0, 45 | /* tp_call */ 0, 46 | /* tp_str */ (reprfunc) Array___str__, 47 | /* tp_getattro */ 0, 48 | /* tp_setattro */ 0, 49 | /* tp_as_buffer */ 0, 50 | /* tp_flags */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, 51 | /* tp_doc */ 0, 52 | /* tp_traverse */ 0, 53 | /* tp_clear */ 0, 54 | /* tp_richcompare */ 0, 55 | /* tp_weaklistoffset */ 0, 56 | /* tp_iter */ 0, 57 | /* tp_iternext */ 0, 58 | /* tp_methods */ 0, 59 | /* tp_members */ 0, 60 | /* tp_getset */ 0, 61 | /* tp_base */ &PyBaseObject_Type, 62 | /* tp_dict */ 0, 63 | /* tp_descr_get */ 0, 64 | /* tp_descr_set */ 0, 65 | /* tp_dictoffset */ 0, 66 | /* tp_init */ 0, 67 | /* tp_alloc */ 0, 68 | /* tp_new */ Array___new__, 69 | }; 70 | 71 | #endif 72 | -------------------------------------------------------------------------------- /pydatastructs/miscellaneous_data_structures/tests/test_disjoint_set.py: -------------------------------------------------------------------------------- 1 | from pydatastructs import DisjointSetForest 2 | from pydatastructs.utils.raises_util import raises 3 | 4 | def test_DisjointSetForest(): 5 | 6 | dst = DisjointSetForest() 7 | for i in range(8): 8 | dst.make_set(i+1) 9 | 10 | dst.union(1, 2) 11 | dst.union(1, 5) 12 | assert dst.find_size(2) == 3 13 | dst.union(1, 6) 14 | dst.union(1, 8) 15 | dst.union(3, 4) 16 | assert dst.find_size(3) == 2 17 | 18 | assert (dst.find_root(1) == dst.find_root(2) == 19 | dst.find_root(5) == dst.find_root(6) == dst.find_root(8)) 20 | assert dst.disjoint_sets() == [[1, 2, 5, 6, 8], [3, 4], [7]] 21 | assert dst.find_root(3) == dst.find_root(4) 22 | assert dst.find_root(7).key == 7 23 | 24 | assert raises(KeyError, lambda: dst.find_root(9)) 25 | assert raises(KeyError, lambda: dst.find_size(9)) 26 | dst.union(3, 1) 27 | assert dst.find_root(3).key == 1 28 | assert dst.find_root(5).key == 1 29 | 
dst.make_root(6) 30 | assert dst.disjoint_sets() == [[1, 2, 3, 4, 5, 6, 8], [7]] 31 | assert dst.find_root(3).key == 6 32 | assert dst.find_root(5).key == 6 33 | dst.make_root(5) 34 | assert dst.find_root(1).key == 5 35 | assert dst.find_root(5).key == 5 36 | assert raises(KeyError, lambda: dst.make_root(9)) 37 | 38 | dst = DisjointSetForest() 39 | for i in range(6): 40 | dst.make_set(i) 41 | assert dst.tree[2].size == 1 42 | dst.union(2, 3) 43 | assert dst.tree[2].size == 2 44 | assert dst.tree[3].size == 1 45 | dst.union(1, 4) 46 | dst.union(2, 4) 47 | assert dst.disjoint_sets() == [[0], [1, 2, 3, 4], [5]] 48 | # current tree 49 | ############### 50 | # 2 51 | # / \ 52 | # 1 3 53 | # / 54 | # 4 55 | ############### 56 | assert dst.tree[2].size == 4 57 | assert dst.tree[1].size == 2 58 | assert dst.tree[3].size == dst.tree[4].size == 1 59 | dst.make_root(4) 60 | # New tree 61 | ############### 62 | # 4 63 | # | 64 | # 2 65 | # / \ 66 | # 1 3 67 | ############### 68 | assert dst.tree[4].size == 4 69 | assert dst.tree[2].size == 3 70 | assert dst.tree[1].size == dst.tree[3].size == 1 71 | -------------------------------------------------------------------------------- /pydatastructs/miscellaneous_data_structures/tests/test_range_query_static.py: -------------------------------------------------------------------------------- 1 | from pydatastructs import ( 2 | RangeQueryStatic, minimum, 3 | greatest_common_divisor, summation, 4 | OneDimensionalArray) 5 | from pydatastructs.utils.raises_util import raises 6 | import random, math 7 | 8 | def _test_RangeQueryStatic_common(func, gen_expected): 9 | 10 | array = OneDimensionalArray(int, []) 11 | raises(ValueError, lambda: RangeQueryStatic(array, func)) 12 | 13 | array = OneDimensionalArray(int, [1]) 14 | rq = RangeQueryStatic(array, func) 15 | assert rq.query(0, 0) == 1 16 | raises(ValueError, lambda: rq.query(0, -1)) 17 | raises(IndexError, lambda: rq.query(0, 1)) 18 | 19 | array_sizes = [3, 6, 12, 24, 48, 96] 20 | 
random.seed(0) 21 | for array_size in array_sizes: 22 | data = random.sample(range(-2*array_size, 2*array_size), array_size) 23 | array = OneDimensionalArray(int, data) 24 | 25 | expected = [] 26 | inputs = [] 27 | for i in range(array_size): 28 | for j in range(i + 1, array_size): 29 | inputs.append((i, j)) 30 | expected.append(gen_expected(data, i, j)) 31 | 32 | data_structures = ["array", "sparse_table"] 33 | for ds in data_structures: 34 | rmq = RangeQueryStatic(array, func, data_structure=ds) 35 | for input, correct in zip(inputs, expected): 36 | assert rmq.query(input[0], input[1]) == correct 37 | 38 | def test_RangeQueryStatic_minimum(): 39 | 40 | def _gen_minimum_expected(data, i, j): 41 | return min(data[i:j + 1]) 42 | 43 | _test_RangeQueryStatic_common(minimum, _gen_minimum_expected) 44 | 45 | def test_RangeQueryStatic_greatest_common_divisor(): 46 | 47 | def _gen_gcd_expected(data, i, j): 48 | if j == i: 49 | return data[i] 50 | else: 51 | expected_gcd = math.gcd(data[i], data[i + 1]) 52 | for idx in range(i + 2, j + 1): 53 | expected_gcd = math.gcd(expected_gcd, data[idx]) 54 | return expected_gcd 55 | 56 | _test_RangeQueryStatic_common(greatest_common_divisor, _gen_gcd_expected) 57 | 58 | def test_RangeQueryStatic_summation(): 59 | 60 | def _gen_summation_expected(data, i, j): 61 | return sum(data[i:j + 1]) 62 | 63 | return _test_RangeQueryStatic_common(summation, _gen_summation_expected) 64 | -------------------------------------------------------------------------------- /pydatastructs/graphs/tests/test_adjacency_matrix.py: -------------------------------------------------------------------------------- 1 | from pydatastructs.graphs import Graph 2 | from pydatastructs.utils import AdjacencyMatrixGraphNode 3 | from pydatastructs.utils.raises_util import raises 4 | from pydatastructs.utils.misc_util import Backend 5 | 6 | def test_AdjacencyMatrix(): 7 | v_0 = AdjacencyMatrixGraphNode(0, 0) 8 | v_1 = AdjacencyMatrixGraphNode(1, 1) 9 | v_2 = 
AdjacencyMatrixGraphNode(2, 2) 10 | g = Graph(v_0, v_1, v_2) 11 | g.add_edge(0, 1, 0) 12 | g.add_edge(1, 2, 0) 13 | g.add_edge(2, 0, 0) 14 | e1 = g.get_edge(0, 1) 15 | e2 = g.get_edge(1, 2) 16 | e3 = g.get_edge(2, 0) 17 | assert (e1.source.name, e1.target.name) == ('0', '1') 18 | assert (e2.source.name, e2.target.name) == ('1', '2') 19 | assert (e3.source.name, e3.target.name) == ('2', '0') 20 | assert g.is_adjacent(0, 1) is True 21 | assert g.is_adjacent(1, 2) is True 22 | assert g.is_adjacent(2, 0) is True 23 | assert g.is_adjacent(1, 0) is False 24 | assert g.is_adjacent(2, 1) is False 25 | assert g.is_adjacent(0, 2) is False 26 | neighbors = g.neighbors(0) 27 | assert neighbors == [v_1] 28 | g.remove_edge(0, 1) 29 | assert g.is_adjacent(0, 1) is False 30 | assert raises(ValueError, lambda: g.add_edge('u', 'v')) 31 | assert raises(ValueError, lambda: g.add_edge('v', 'x')) 32 | assert raises(ValueError, lambda: g.add_edge(2, 3)) 33 | assert raises(ValueError, lambda: g.add_edge(3, 2)) 34 | 35 | v_3 = AdjacencyMatrixGraphNode('0', 0, backend = Backend.CPP) 36 | v_4 = AdjacencyMatrixGraphNode('1', 1, backend = Backend.CPP) 37 | v_5 = AdjacencyMatrixGraphNode('2', 2, backend = Backend.CPP) 38 | g2 = Graph(v_3, v_4, v_5, implementation = 'adjacency_matrix', backend = Backend.CPP) 39 | g2.add_edge('0', '1', 0) 40 | g2.add_edge('1', '2', 0) 41 | g2.add_edge('2', '0', 0) 42 | assert g2.is_adjacent('0', '1') is True 43 | assert g2.is_adjacent('1', '2') is True 44 | assert g2.is_adjacent('2', '0') is True 45 | assert g2.is_adjacent('1', '0') is False 46 | assert g2.is_adjacent('2', '1') is False 47 | assert g2.is_adjacent('0', '2') is False 48 | neighbors = g2.neighbors('0') 49 | assert neighbors == [v_4] 50 | g2.remove_edge('0', '1') 51 | assert g2.is_adjacent('0', '1') is False 52 | assert raises(ValueError, lambda: g2.add_edge('u', 'v')) 53 | assert raises(ValueError, lambda: g2.add_edge('v', 'x')) 54 | 
-------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 12 | # 13 | # import os 14 | # import sys 15 | # sys.path.insert(0, os.path.abspath('.')) 16 | 17 | # -- Imports ---------------- 18 | 19 | import sphinx_readable_theme 20 | 21 | # -- Project information ----------------------------------------------------- 22 | 23 | project = 'PyDataStructs' 24 | copyright = '2021, PyDataStructs Development Team' 25 | author = 'PyDataStructs Development Team' 26 | master_doc = 'index' 27 | 28 | # The full version, including alpha/beta/rc tags 29 | release = '1.0.1-dev' 30 | 31 | 32 | # -- General configuration --------------------------------------------------- 33 | 34 | # Add any Sphinx extension module names here, as strings. They can be 35 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 36 | # ones. 37 | extensions = [ 38 | 'sphinx.ext.autodoc', 39 | 'sphinx.ext.napoleon', 40 | 'myst_nb' 41 | ] 42 | 43 | jupyter_execute_notebooks = "off" 44 | 45 | napoleon_numpy_docstring = True 46 | 47 | # Add any paths that contain templates here, relative to this directory. 48 | templates_path = [] 49 | 50 | # List of patterns, relative to source directory, that match files and 51 | # directories to ignore when looking for source files. 
52 | # This pattern also affects html_static_path and html_extra_path. 53 | exclude_patterns = [] 54 | 55 | 56 | # -- Options for HTML output ------------------------------------------------- 57 | 58 | # The theme to use for HTML and HTML Help pages. See the documentation for 59 | # a list of builtin themes. 60 | # 61 | html_theme = 'readable' 62 | 63 | html_theme_path = [sphinx_readable_theme.get_html_theme_path()] 64 | 65 | # Add any paths that contain custom static files (such as style sheets) here, 66 | # relative to this directory. They are copied after the builtin static files, 67 | # so a file named "default.css" will overwrite the builtin "default.css". 68 | html_static_path = [] 69 | 70 | autodoc_default_options = { 71 | 'member-order': 'bysource', 72 | 'members': True, 73 | 'undoc-members': True, 74 | 'special-members': True, 75 | 'exclude-members': '__new__, methods, __slots__, __dict__, __weakref__' 76 | } 77 | -------------------------------------------------------------------------------- /pydatastructs/miscellaneous_data_structures/tests/test_range_query_dynamic.py: -------------------------------------------------------------------------------- 1 | from pydatastructs import ( 2 | RangeQueryDynamic, minimum, 3 | greatest_common_divisor, summation, 4 | OneDimensionalArray) 5 | from pydatastructs.utils.raises_util import raises 6 | import random, math 7 | from copy import deepcopy 8 | 9 | def _test_RangeQueryDynamic_common(func, gen_expected): 10 | 11 | array = OneDimensionalArray(int, []) 12 | raises(ValueError, lambda: RangeQueryDynamic(array, func)) 13 | 14 | array = OneDimensionalArray(int, [1]) 15 | rq = RangeQueryDynamic(array, func) 16 | assert rq.query(0, 0) == 1 17 | raises(ValueError, lambda: rq.query(0, -1)) 18 | raises(IndexError, lambda: rq.query(0, 1)) 19 | 20 | array_sizes = [3, 6, 12, 24, 48, 96] 21 | random.seed(0) 22 | for array_size in array_sizes: 23 | inputs = [] 24 | for i in range(array_size): 25 | for j in range(i + 1, 
array_size): 26 | inputs.append((i, j)) 27 | 28 | data_structures = ["array", "segment_tree"] 29 | for ds in data_structures: 30 | data = random.sample(range(-2*array_size, 2*array_size), array_size) 31 | array = OneDimensionalArray(int, data) 32 | rmq = RangeQueryDynamic(array, func, data_structure=ds) 33 | for input in inputs: 34 | assert rmq.query(input[0], input[1]) == gen_expected(data, input[0], input[1]) 35 | 36 | data_copy = deepcopy(data) 37 | for _ in range(array_size//2): 38 | index = random.randint(0, array_size - 1) 39 | value = random.randint(0, 4 * array_size) 40 | data_copy[index] = value 41 | rmq.update(index, value) 42 | 43 | for input in inputs: 44 | assert rmq.query(input[0], input[1]) == gen_expected(data_copy, input[0], input[1]) 45 | 46 | def test_RangeQueryDynamic_minimum(): 47 | 48 | def _gen_minimum_expected(data, i, j): 49 | return min(data[i:j + 1]) 50 | 51 | _test_RangeQueryDynamic_common(minimum, _gen_minimum_expected) 52 | 53 | def test_RangeQueryDynamic_greatest_common_divisor(): 54 | 55 | def _gen_gcd_expected(data, i, j): 56 | if j == i: 57 | return data[i] 58 | else: 59 | expected_gcd = math.gcd(data[i], data[i + 1]) 60 | for idx in range(i + 2, j + 1): 61 | expected_gcd = math.gcd(expected_gcd, data[idx]) 62 | return expected_gcd 63 | 64 | _test_RangeQueryDynamic_common(greatest_common_divisor, _gen_gcd_expected) 65 | 66 | def test_RangeQueryDynamic_summation(): 67 | 68 | def _gen_summation_expected(data, i, j): 69 | return sum(data[i:j + 1]) 70 | 71 | return _test_RangeQueryDynamic_common(summation, _gen_summation_expected) 72 | -------------------------------------------------------------------------------- /pydatastructs/strings/tests/test_algorithms.py: -------------------------------------------------------------------------------- 1 | from pydatastructs.strings import find 2 | 3 | import random, string 4 | 5 | def test_kmp(): 6 | _test_common_string_matching('kmp') 7 | 8 | def test_rka(): 9 | 
_test_common_string_matching('rabin_karp') 10 | 11 | def test_bm(): 12 | _test_common_string_matching('boyer_moore') 13 | 14 | def test_zf(): 15 | _test_common_string_matching('z_function') 16 | 17 | def _test_common_string_matching(algorithm): 18 | true_text_pattern_dictionary = { 19 | "Knuth-Morris-Pratt": "-Morris-", 20 | "abcabcabcabdabcabdabcabca": "abcabdabcabca", 21 | "aefcdfaecdaefaefcdaefeaefcdcdeae": "aefcdaefeaefcd", 22 | "aaaaaaaa": "aaa", 23 | "fullstringmatch": "fullstringmatch", 24 | "z-function": "z-fun" 25 | } 26 | for test_case_key in true_text_pattern_dictionary: 27 | text = test_case_key 28 | query = true_text_pattern_dictionary[test_case_key] 29 | positions = find(text, query, algorithm) 30 | for i in range(positions._last_pos_filled): 31 | p = positions[i] 32 | assert text[p:p + len(query)] == query 33 | 34 | false_text_pattern_dictionary = { 35 | "Knuth-Morris-Pratt": "-Pratt-", 36 | "abcabcabcabdabcabdabcabca": "qwertyuiopzxcvbnm", 37 | "aefcdfaecdaefaefcdaefeaefcdcdeae": "cdaefaefe", 38 | "fullstringmatch": "fullstrinmatch", 39 | "z-function": "function-", 40 | "abc": "", 41 | "": "abc" 42 | } 43 | 44 | for test_case_key in false_text_pattern_dictionary: 45 | text = test_case_key 46 | query = false_text_pattern_dictionary[test_case_key] 47 | positions = find(text, query, algorithm) 48 | assert positions.size == 0 49 | 50 | random.seed(1000) 51 | 52 | def gen_random_string(length): 53 | ascii = string.ascii_uppercase 54 | digits = string.digits 55 | return ''.join(random.choices(ascii + digits, k=length)) 56 | 57 | for _ in range(100): 58 | query = gen_random_string(random.randint(3, 10)) 59 | num_times = random.randint(1, 10) 60 | freq = 0 61 | text = "" 62 | while freq < num_times: 63 | rand_str = gen_random_string(random.randint(5, 10)) 64 | if rand_str != query: 65 | freq += 1 66 | text += query + rand_str + query 67 | positions = find(text, query, algorithm) 68 | assert positions._num == num_times * 2 69 | for i in 
range(positions._last_pos_filled): 70 | p = positions[i] 71 | assert text[p:p + len(query)] == query 72 | 73 | text = gen_random_string(len(query)) 74 | if text != query: 75 | positions = find(text, query, algorithm) 76 | assert positions.size == 0 77 | -------------------------------------------------------------------------------- /pydatastructs/miscellaneous_data_structures/tests/test_stack.py: -------------------------------------------------------------------------------- 1 | from pydatastructs.miscellaneous_data_structures import Stack 2 | from pydatastructs.miscellaneous_data_structures.stack import ArrayStack, LinkedListStack 3 | from pydatastructs.miscellaneous_data_structures._backend.cpp import _stack 4 | from pydatastructs.utils.raises_util import raises 5 | from pydatastructs.utils.misc_util import _check_type, Backend 6 | 7 | 8 | def test_Stack(): 9 | s = Stack(implementation='array') 10 | s1 = Stack() 11 | assert _check_type(s, ArrayStack) is True 12 | assert _check_type(s1, ArrayStack) is True 13 | s2 = Stack(implementation='linked_list') 14 | assert _check_type(s2, LinkedListStack) is True 15 | assert raises(NotImplementedError, lambda: Stack(implementation='')) 16 | 17 | s3 = Stack(backend=Backend.CPP) 18 | assert _check_type(s3, _stack.ArrayStack) is True 19 | s4 = Stack(implementation="array", backend=Backend.CPP) 20 | assert _check_type(s4, _stack.ArrayStack) is True 21 | 22 | def test_ArrayStack(): 23 | s = Stack(implementation='array') 24 | s.push(1) 25 | s.push(2) 26 | s.push(3) 27 | assert s.peek == 3 28 | assert str(s) == '[1, 2, 3]' 29 | assert s.pop() == 3 30 | assert s.pop() == 2 31 | assert s.pop() == 1 32 | assert s.is_empty is True 33 | assert raises(IndexError, lambda : s.pop()) 34 | _s = Stack(items=[1, 2, 3]) 35 | assert str(_s) == '[1, 2, 3]' 36 | assert len(_s) == 3 37 | 38 | # Cpp test 39 | s1 = Stack(implementation="array", backend=Backend.CPP) 40 | s1.push(1) 41 | s1.push(2) 42 | s1.push(3) 43 | assert s1.peek == 3 44 | 
assert str(s1) == "['1', '2', '3']" 45 | assert s1.pop() == 3 46 | assert s1.pop() == 2 47 | assert s1.pop() == 1 48 | assert s1.is_empty is True 49 | assert raises(IndexError, lambda : s1.pop()) 50 | _s1 = Stack(items=[1, 2, 3], backend=Backend.CPP) 51 | assert str(_s1) == "['1', '2', '3']" 52 | assert len(_s1) == 3 53 | 54 | def test_LinkedListStack(): 55 | s = Stack(implementation='linked_list') 56 | s.push(1) 57 | s.push(2) 58 | s.push(3) 59 | assert s.peek.key == 3 60 | assert str(s) == ("['(1, None)', '(2, None)', '(3, None)']") 61 | assert s.pop().key == 3 62 | assert s.pop().key == 2 63 | assert s.pop().key == 1 64 | assert s.is_empty is True 65 | assert raises(IndexError, lambda : s.pop()) 66 | assert str(s) == '[]' 67 | _s = Stack(implementation='linked_list',items=[1, 2, 3]) 68 | assert str(_s) == "['(1, None)', '(2, None)', '(3, None)']" 69 | assert len(_s) == 3 70 | 71 | s = Stack(implementation='linked_list',items=['a',None,type,{}]) 72 | assert len(s) == 4 73 | assert s.size == 4 74 | 75 | peek = s.peek 76 | assert peek.key == s.pop().key 77 | assert raises(TypeError, lambda: Stack(implementation='linked_list', items={0, 1})) 78 | -------------------------------------------------------------------------------- /pydatastructs/miscellaneous_data_structures/binomial_trees.py: -------------------------------------------------------------------------------- 1 | from pydatastructs.utils.misc_util import ( 2 | BinomialTreeNode, _check_type, Backend, 3 | raise_if_backend_is_not_python) 4 | 5 | __all__ = [ 6 | 'BinomialTree' 7 | ] 8 | 9 | class BinomialTree(object): 10 | """ 11 | Represents binomial trees 12 | 13 | Parameters 14 | ========== 15 | 16 | root: BinomialTreeNode 17 | The root of the binomial tree. 18 | By default, None 19 | order: int 20 | The order of the binomial tree. 21 | By default, None 22 | backend: pydatastructs.Backend 23 | The backend to be used. 24 | Optional, by default, the best available 25 | backend is used. 
26 | 27 | Examples 28 | ======== 29 | 30 | >>> from pydatastructs import BinomialTree, BinomialTreeNode 31 | >>> root = BinomialTreeNode(1, 1) 32 | >>> tree = BinomialTree(root, 0) 33 | >>> tree.is_empty 34 | False 35 | 36 | References 37 | ========== 38 | 39 | .. [1] https://en.wikipedia.org/wiki/Binomial_heap 40 | """ 41 | __slots__ = ['root', 'order'] 42 | 43 | def __new__(cls, root=None, order=None, **kwargs): 44 | raise_if_backend_is_not_python( 45 | cls, kwargs.get('backend', Backend.PYTHON)) 46 | if root is not None and \ 47 | not _check_type(root, BinomialTreeNode): 48 | raise TypeError("%s i.e., root should be of " 49 | "type BinomialTreeNode."%(root)) 50 | if order is not None and not _check_type(order, int): 51 | raise TypeError("%s i.e., order should be of " 52 | "type int."%(order)) 53 | obj = object.__new__(cls) 54 | if root is not None: 55 | root.is_root = True 56 | obj.root = root 57 | obj.order = order 58 | return obj 59 | 60 | @classmethod 61 | def methods(cls): 62 | return ['add_sub_tree', '__new__', 'is_empty'] 63 | 64 | def add_sub_tree(self, other_tree): 65 | """ 66 | Adds a sub tree to current tree. 67 | 68 | Parameters 69 | ========== 70 | 71 | other_tree: BinomialTree 72 | 73 | Raises 74 | ====== 75 | 76 | ValueError: If order of the two trees 77 | are different. 
78 | """ 79 | if not _check_type(other_tree, BinomialTree): 80 | raise TypeError("%s i.e., other_tree should be of " 81 | "type BinomialTree"%(other_tree)) 82 | if self.order != other_tree.order: 83 | raise ValueError("Orders of both the trees should be same.") 84 | self.root.children.append(other_tree.root) 85 | other_tree.root.parent = self.root 86 | other_tree.root.is_root = False 87 | self.order += 1 88 | 89 | @property 90 | def is_empty(self): 91 | return self.root is None 92 | -------------------------------------------------------------------------------- /pydatastructs/utils/testing_util.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pathlib 3 | import glob 4 | import types 5 | 6 | __all__ = ['test'] 7 | 8 | 9 | # Root pydatastructs directory 10 | ROOT_DIR = pathlib.Path(os.path.abspath(__file__)).parents[1] 11 | 12 | 13 | SKIP_FILES = ['testing_util.py'] 14 | 15 | def test(submodules=None, only_benchmarks=False, 16 | benchmarks_size=1000, **kwargs): 17 | """ 18 | Runs the library tests using pytest 19 | 20 | Parameters 21 | ========== 22 | 23 | submodules: Optional, list[str] 24 | List of submodules test to run. By default runs 25 | all the tests 26 | """ 27 | try: 28 | import pytest 29 | except ImportError: 30 | raise Exception("pytest must be installed. 
Use `pip install pytest` " 31 | "to install it.") 32 | 33 | # set benchmarks size 34 | os.environ["PYDATASTRUCTS_BENCHMARK_SIZE"] = str(benchmarks_size) 35 | test_files = [] 36 | if submodules: 37 | if not isinstance(submodules, (list, tuple)): 38 | submodules = [submodules] 39 | for path in glob.glob(f'{ROOT_DIR}/**/test_*.py', recursive=True): 40 | skip_test = False 41 | for skip in SKIP_FILES: 42 | if skip in path: 43 | skip_test = True 44 | break 45 | if skip_test: 46 | continue 47 | for sub_var in submodules: 48 | if isinstance(sub_var, types.ModuleType): 49 | sub = sub_var.__name__.split('.')[-1] 50 | elif isinstance(sub_var, str): 51 | sub = sub_var 52 | else: 53 | raise Exception("Submodule should be of type: str or module") 54 | if sub in path: 55 | if not only_benchmarks: 56 | if 'benchmarks' not in path: 57 | test_files.append(path) 58 | else: 59 | if 'benchmarks' in path: 60 | test_files.append(path) 61 | break 62 | else: 63 | for path in glob.glob(f'{ROOT_DIR}/**/test_*.py', recursive=True): 64 | skip_test = False 65 | for skip in SKIP_FILES: 66 | if skip in path: 67 | skip_test = True 68 | break 69 | if skip_test: 70 | continue 71 | if not only_benchmarks: 72 | if 'benchmarks' not in path: 73 | test_files.append(path) 74 | else: 75 | if 'benchmarks' in path: 76 | test_files.append(path) 77 | 78 | extra_args = [] 79 | if kwargs.get("n", False) is not False: 80 | extra_args.append("-n") 81 | extra_args.append(str(kwargs["n"])) 82 | 83 | pytest.main(extra_args + test_files) 84 | -------------------------------------------------------------------------------- /pydatastructs/trees/_backend/cpp/trees.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include "BinaryTree.hpp" 3 | #include "BinarySearchTree.hpp" 4 | #include "BinaryTreeTraversal.hpp" 5 | #include "SelfBalancingBinaryTree.hpp" 6 | #include "RedBlackTree.hpp" 7 | #include "BinaryIndexedTree.hpp" 8 | #include "SplayTree.hpp" 9 | #include 
"AVLTree.hpp" 10 | #include "CartesianTree.hpp" 11 | #include "Treap.hpp" 12 | 13 | static struct PyModuleDef trees_struct = { 14 | PyModuleDef_HEAD_INIT, 15 | "_trees", 16 | 0, 17 | -1, 18 | NULL, 19 | }; 20 | 21 | PyMODINIT_FUNC PyInit__trees(void) { 22 | Py_Initialize(); 23 | PyObject *trees = PyModule_Create(&trees_struct); 24 | 25 | if (PyType_Ready(&BinaryTreeType) < 0) { 26 | return NULL; 27 | } 28 | Py_INCREF(&BinaryTreeType); 29 | PyModule_AddObject(trees, "BinaryTree", reinterpret_cast(&BinaryTreeType)); 30 | 31 | if (PyType_Ready(&BinarySearchTreeType) < 0) { 32 | return NULL; 33 | } 34 | Py_INCREF(&BinarySearchTreeType); 35 | PyModule_AddObject(trees, "BinarySearchTree", reinterpret_cast(&BinarySearchTreeType)); 36 | 37 | if (PyType_Ready(&BinaryTreeTraversalType) < 0) { 38 | return NULL; 39 | } 40 | Py_INCREF(&BinaryTreeTraversalType); 41 | PyModule_AddObject(trees, "BinaryTreeTraversal", reinterpret_cast(&BinaryTreeTraversalType)); 42 | 43 | if (PyType_Ready(&SelfBalancingBinaryTreeType) < 0) { 44 | return NULL; 45 | } 46 | Py_INCREF(&SelfBalancingBinaryTreeType); 47 | PyModule_AddObject(trees, "SelfBalancingBinaryTree", reinterpret_cast(&SelfBalancingBinaryTreeType)); 48 | 49 | if (PyType_Ready(&RedBlackTreeType) < 0) { 50 | return NULL; 51 | } 52 | Py_INCREF(&RedBlackTreeType); 53 | PyModule_AddObject(trees, "RedBlackTree", reinterpret_cast(&RedBlackTreeType)); 54 | 55 | if (PyType_Ready(&BinaryIndexedTreeType) < 0) { 56 | return NULL; 57 | } 58 | Py_INCREF(&BinaryIndexedTreeType); 59 | PyModule_AddObject(trees, "BinaryIndexedTree", reinterpret_cast(&BinaryIndexedTreeType)); 60 | 61 | if (PyType_Ready(&SplayTreeType) < 0) { 62 | return NULL; 63 | } 64 | Py_INCREF(&SplayTreeType); 65 | PyModule_AddObject(trees, "SplayTree", reinterpret_cast(&SplayTreeType)); 66 | 67 | if (PyType_Ready(&AVLTreeType) < 0) { 68 | return NULL; 69 | } 70 | Py_INCREF(&AVLTreeType); 71 | PyModule_AddObject(trees, "AVLTree", reinterpret_cast(&AVLTreeType)); 72 | 73 | if 
(PyType_Ready(&CartesianTreeType) < 0) { 74 | return NULL; 75 | } 76 | Py_INCREF(&CartesianTreeType); 77 | PyModule_AddObject(trees, "CartesianTree", reinterpret_cast(&CartesianTreeType)); 78 | 79 | if (PyType_Ready(&TreapType) < 0) { 80 | return NULL; 81 | } 82 | Py_INCREF(&TreapType); 83 | PyModule_AddObject(trees, "Treap", reinterpret_cast(&TreapType)); 84 | 85 | return trees; 86 | } 87 | -------------------------------------------------------------------------------- /pydatastructs/graphs/_backend/cpp/graph.cpp: -------------------------------------------------------------------------------- 1 | #define PY_SSIZE_T_CLEAN 2 | #include 3 | #include "AdjacencyList.hpp" 4 | #include "AdjacencyMatrix.hpp" 5 | #include "AdjacencyListGraphNode.hpp" 6 | #include "AdjacencyMatrixGraphNode.hpp" 7 | #include "GraphEdge.hpp" 8 | #include "GraphNode.hpp" 9 | #include "graph_bindings.hpp" 10 | #include "Algorithms.hpp" 11 | 12 | static PyMethodDef GraphMethods[] = { 13 | {"bfs_adjacency_list", (PyCFunction)breadth_first_search_adjacency_list, METH_VARARGS | METH_KEYWORDS, "Run BFS on adjacency list with callback"}, 14 | {"bfs_adjacency_matrix", (PyCFunction)breadth_first_search_adjacency_matrix, METH_VARARGS | METH_KEYWORDS, "Run BFS on adjacency matrix with callback"}, 15 | {"minimum_spanning_tree_prim_adjacency_list", (PyCFunction)minimum_spanning_tree_prim_adjacency_list, METH_VARARGS | METH_KEYWORDS, "Run Prim's algorithm on adjacency list"}, 16 | {"shortest_paths_dijkstra_adjacency_list", (PyCFunction)shortest_paths_dijkstra_adjacency_list, METH_VARARGS | METH_KEYWORDS, "Dijkstra's algorithm for adjacency list graphs"}, 17 | {NULL, NULL, 0, NULL} 18 | }; 19 | 20 | static struct PyModuleDef graph_module = { 21 | PyModuleDef_HEAD_INIT, 22 | "_graph", 23 | "C++ module for graphs", 24 | -1, 25 | GraphMethods, 26 | }; 27 | 28 | PyMODINIT_FUNC PyInit__graph(void) { 29 | PyObject* m; 30 | 31 | if (PyType_Ready(&GraphNodeType) < 0) 32 | return NULL; 33 | 34 | if 
(PyType_Ready(&AdjacencyListGraphNodeType) < 0) 35 | return NULL; 36 | 37 | if (PyType_Ready(&AdjacencyMatrixGraphNodeType) < 0) 38 | return NULL; 39 | 40 | if (PyType_Ready(&GraphEdgeType) < 0) 41 | return NULL; 42 | 43 | if (PyType_Ready(&AdjacencyListGraphType) < 0) 44 | return NULL; 45 | 46 | if (PyType_Ready(&AdjacencyMatrixGraphType) < 0) 47 | return NULL; 48 | 49 | m = PyModule_Create(&graph_module); 50 | if (m == NULL) 51 | return NULL; 52 | 53 | Py_INCREF(&GraphNodeType); 54 | PyModule_AddObject(m, "GraphNode", (PyObject*)&GraphNodeType); 55 | 56 | Py_INCREF(&AdjacencyListGraphNodeType); 57 | PyModule_AddObject(m, "AdjacencyListGraphNode", (PyObject*)&AdjacencyListGraphNodeType); 58 | 59 | Py_INCREF(&AdjacencyMatrixGraphNodeType); 60 | PyModule_AddObject(m, "AdjacencyMatrixGraphNode", (PyObject*)&AdjacencyMatrixGraphNodeType); 61 | 62 | Py_INCREF(&GraphEdgeType); 63 | PyModule_AddObject(m, "GraphEdge", (PyObject*)&GraphEdgeType); 64 | 65 | Py_INCREF(&AdjacencyListGraphType); 66 | if (PyModule_AddObject(m, "AdjacencyListGraph", (PyObject*)&AdjacencyListGraphType) < 0) { 67 | Py_DECREF(&AdjacencyListGraphType); 68 | Py_DECREF(m); 69 | return NULL; 70 | } 71 | 72 | Py_INCREF(&AdjacencyMatrixGraphType); 73 | if (PyModule_AddObject(m, "AdjacencyMatrixGraph", (PyObject*)&AdjacencyMatrixGraphType) < 0) { 74 | Py_DECREF(&AdjacencyMatrixGraphType); 75 | Py_DECREF(m); 76 | return NULL; 77 | } 78 | 79 | return m; 80 | } 81 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. PyDataStructs documentation master file, created by 2 | sphinx-quickstart on Sun Oct 17 19:57:08 2021. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to PyDataStructs's documentation! 
7 | ========================================= 8 | 9 | This project aims to be a Python package for various data 10 | structures in computer science. We are also working on the 11 | development of algorithms including their parallel implementations. 12 | To the best of our knowledge, a well-designed library/package which 13 | has covered most of the data structures and algorithms doesn't exist yet. 14 | 15 | Once the software design becomes more stable after a few releases of 16 | this package in the near future, we also aim to provide APIs for the 17 | code in C++ and Java as well. 18 | 19 | .. note:: 20 | 21 | This project is under active development and contributions are welcome. 22 | 23 | Installation 24 | ============ 25 | 26 | After changing your directory to project root, you can 27 | install the package by running the following command, 28 | 29 | ``python scripts/build/install.py`` 30 | 31 | For development purposes, i.e., if you intend to be a contributor, 32 | 33 | ``python scripts/build/develop.py`` 34 | 35 | For building documentation execute the following commands one after 36 | the other, 37 | 38 | 1. ``pip install -r docs/requirements.txt`` 39 | 2. ``sphinx-build -b html docs/source/ docs/build/html`` 40 | 41 | Make sure that your python version is at least ``3.8``. 42 | 43 | Why PyDataStructs? 44 | ================== 45 | 46 | 1. **Single package for all your data structures and algorithms** - We have and are 47 | implementing many popular and useful data structures and algorithms. 48 | 49 | 2. **Consistent and Clean Interface** - The APIs we have provided are **consistent** with each other, 50 | **clean** and **easy to use**. We make sure of that before adding any new data structure or algorithm. 51 | 52 | 3. **Well Tested** - We thoroughly test our code before making any new addition to PyDataStructs. 53 | **99 percent** lines of our code have already been tested by us. 
54 | 55 | So, **you can easily rely on PyDataStructs** for any data structure or algorithm you want to use 56 | **without worrying about implementing** it **from scratch**. Everything is just a few calls away. 57 | 58 | Why do we use Python? 59 | ===================== 60 | 61 | As we know Python is an interpreted language and hence is 62 | slow compared to C++, the most popular language for competitive programming. 63 | We still decided to use Python because the software development can happen 64 | at a much faster pace and it is much easier to test various software designs 65 | and APIs as coding them out takes no time. However, keeping the need of the 66 | users in mind, we will shift to C++ backend, which will happen quickly as 67 | we would be required to just translate the tested code rather than writing it 68 | from scratch, after a few releases with APIs available for all the languages. 69 | 70 | Contents 71 | ======== 72 | 73 | .. toctree:: 74 | :maxdepth: 1 75 | 76 | tutorials.rst 77 | pydatastructs_sphinx_graphs 78 | contributing.rst 79 | authors.rst 80 | pydatastructs/pydatastructs.rst 81 | -------------------------------------------------------------------------------- /pydatastructs/utils/tests/test_misc_util.py: -------------------------------------------------------------------------------- 1 | from pydatastructs.utils import (TreeNode, AdjacencyListGraphNode, AdjacencyMatrixGraphNode, 2 | GraphEdge, BinomialTreeNode, MAryTreeNode, CartesianTreeNode, RedBlackTreeNode, SkipNode) 3 | from pydatastructs.utils.raises_util import raises 4 | from pydatastructs.utils.misc_util import Backend 5 | 6 | def test_cpp_TreeNode(): 7 | n = TreeNode(1,100,backend=Backend.CPP) 8 | assert str(n) == "(None, 1, 100, None)" 9 | 10 | def test_AdjacencyListGraphNode(): 11 | g_1 = AdjacencyListGraphNode('g_1', 1) 12 | g_2 = AdjacencyListGraphNode('g_2', 2) 13 | g = AdjacencyListGraphNode('g', 0, adjacency_list=[g_1, g_2]) 14 | g.add_adjacent_node('g_3', 3) 15 | assert 
g.g_1.name == 'g_1' 16 | assert g.g_2.name == 'g_2' 17 | assert g.g_3.name == 'g_3' 18 | g.remove_adjacent_node('g_3') 19 | assert hasattr(g, 'g_3') is False 20 | assert raises(ValueError, lambda: g.remove_adjacent_node('g_3')) 21 | g.add_adjacent_node('g_1', 4) 22 | assert g.g_1.data == 4 23 | assert str(g) == "('g', 0)" 24 | 25 | h_1 = AdjacencyListGraphNode('h_1', 1, backend = Backend.CPP) 26 | h_2 = AdjacencyListGraphNode('h_2', 2, backend = Backend.CPP) 27 | assert str(h_1) == "('h_1', 1)" 28 | h = AdjacencyListGraphNode('h', 0, adjacency_list = [h_1, h_2], backend = Backend.CPP) 29 | h.add_adjacent_node('h_3', 3) 30 | assert h.adjacent['h_1'].name == 'h_1' 31 | assert h.adjacent['h_2'].name == 'h_2' 32 | assert h.adjacent['h_3'].name == 'h_3' 33 | h.remove_adjacent_node('h_3') 34 | assert 'h_3' not in h.adjacent 35 | assert raises(ValueError, lambda: h.remove_adjacent_node('h_3')) 36 | h.add_adjacent_node('h_1', 4) 37 | assert h.adjacent['h_1'] == 4 38 | assert str(h) == "('h', 0)" 39 | h_5 = AdjacencyListGraphNode('h_5', h_1, backend = Backend.CPP) 40 | assert h_5.data == h_1 41 | 42 | def test_AdjacencyMatrixGraphNode(): 43 | g = AdjacencyMatrixGraphNode("1", 3) 44 | g2 = AdjacencyMatrixGraphNode("1", 3, backend = Backend.CPP) 45 | assert str(g) == "('1', 3)" 46 | assert str(g2) == "('1', 3)" 47 | g3 = AdjacencyListGraphNode("3", g2, backend = Backend.CPP) 48 | assert g3.data == g2 49 | 50 | 51 | def test_GraphEdge(): 52 | g_1 = AdjacencyListGraphNode('g_1', 1) 53 | g_2 = AdjacencyListGraphNode('g_2', 2) 54 | e = GraphEdge(g_1, g_2, value=2) 55 | assert str(e) == "('g_1', 'g_2')" 56 | 57 | h_1 = AdjacencyListGraphNode('h_1', 1, backend = Backend.CPP) 58 | h_2 = AdjacencyListGraphNode('h_2', 2, backend = Backend.CPP) 59 | e2 = GraphEdge(h_1, h_2, value = 2, backend = Backend.CPP) 60 | assert str(e2) == "('h_1', 'h_2', 2)" 61 | 62 | def test_BinomialTreeNode(): 63 | b = BinomialTreeNode(1,1) 64 | b.add_children(*[BinomialTreeNode(i,i) for i in range(2,10)]) 
65 | assert str(b) == '(1, 1)' 66 | assert str(b.children) == "['(2, 2)', '(3, 3)', '(4, 4)', '(5, 5)', '(6, 6)', '(7, 7)', '(8, 8)', '(9, 9)']" 67 | 68 | def test_MAryTreeNode(): 69 | m = MAryTreeNode(1, 1) 70 | m.add_children(*list(range(2, 10))) 71 | assert str(m) == "(1, 1)" 72 | assert str(m.children) == "['2', '3', '4', '5', '6', '7', '8', '9']" 73 | 74 | def test_CartesianTreeNode(): 75 | c = CartesianTreeNode(1, 1, 1) 76 | assert str(c) == "(None, 1, 1, 1, None)" 77 | 78 | def test_RedBlackTreeNode(): 79 | c = RedBlackTreeNode(1, 1) 80 | assert str(c) == "(None, 1, 1, None)" 81 | 82 | def test_SkipNode(): 83 | c = SkipNode(1) 84 | assert str(c) == '(1, None)' 85 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to making participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, sex characteristics, gender identity, and expression, 9 | level of experience, education, socio-economic status, nationality, personal 10 | appearance, race, religion, or sexual identity and orientation. 
11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | * Using welcoming and inclusive language 18 | * Being respectful of differing viewpoints and experiences 19 | * Gracefully accepting constructive criticism 20 | * Focusing on what is best for the community 21 | * Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | * The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | * Trolling, insulting/derogatory comments, and personal or political attacks 28 | * Public or private harassment 29 | * Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | * Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies both within project spaces and in public spaces 49 | when an individual is representing the project or its community. Examples of 50 | representing a project or community include using an official project e-mail 51 | address, posting via an official social media account, or acting as an appointed 52 | representative at an online or offline event. 
Representation of a project may be 53 | further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by contacting the project team at czgdp1807@gmail.com. All 59 | complaints will be reviewed and investigated and will result in a response that 60 | is deemed necessary and appropriate to the circumstances. The project team is 61 | obligated to maintain confidentiality with regard to the reporter of an incident. 62 | Further details of specific enforcement policies may be posted separately. 63 | 64 | Project maintainers who do not follow or enforce the Code of Conduct in good 65 | faith may face temporary or permanent repercussions as determined by other 66 | members of the project's leadership. 67 | 68 | ## Attribution 69 | 70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html 72 | 73 | [homepage]: https://www.contributor-covenant.org 74 | 75 | For answers to common questions about this code of conduct, see 76 | https://www.contributor-covenant.org/faq 77 | -------------------------------------------------------------------------------- /pydatastructs/graphs/tests/test_adjacency_list.py: -------------------------------------------------------------------------------- 1 | from pydatastructs.graphs import Graph 2 | from pydatastructs.utils import AdjacencyListGraphNode 3 | from pydatastructs.utils.raises_util import raises 4 | from pydatastructs.utils.misc_util import Backend 5 | 6 | def test_adjacency_list(): 7 | v_1 = AdjacencyListGraphNode('v_1', 1) 8 | v_2 = AdjacencyListGraphNode('v_2', 2) 9 | g = Graph(v_1, v_2, implementation='adjacency_list') 10 | v_3 = AdjacencyListGraphNode('v_3', 3) 11 | g.add_vertex(v_2) 12 | g.add_vertex(v_3) 13 | g.add_edge('v_1', 'v_2') 14 | g.add_edge('v_2', 'v_3') 15 | g.add_edge('v_3', 'v_1') 
16 | assert g.is_adjacent('v_1', 'v_2') is True 17 | assert g.is_adjacent('v_2', 'v_3') is True 18 | assert g.is_adjacent('v_3', 'v_1') is True 19 | assert g.is_adjacent('v_2', 'v_1') is False 20 | assert g.is_adjacent('v_3', 'v_2') is False 21 | assert g.is_adjacent('v_1', 'v_3') is False 22 | neighbors = g.neighbors('v_1') 23 | assert neighbors == [v_2] 24 | v = AdjacencyListGraphNode('v', 4) 25 | g.add_vertex(v) 26 | g.add_edge('v_1', 'v', 0) 27 | g.add_edge('v_2', 'v', 0) 28 | g.add_edge('v_3', 'v', 0) 29 | assert g.is_adjacent('v_1', 'v') is True 30 | assert g.is_adjacent('v_2', 'v') is True 31 | assert g.is_adjacent('v_3', 'v') is True 32 | e1 = g.get_edge('v_1', 'v') 33 | e2 = g.get_edge('v_2', 'v') 34 | e3 = g.get_edge('v_3', 'v') 35 | assert (e1.source.name, e1.target.name) == ('v_1', 'v') 36 | assert (e2.source.name, e2.target.name) == ('v_2', 'v') 37 | assert (e3.source.name, e3.target.name) == ('v_3', 'v') 38 | g.remove_edge('v_1', 'v') 39 | assert g.is_adjacent('v_1', 'v') is False 40 | g.remove_vertex('v') 41 | assert g.is_adjacent('v_2', 'v') is False 42 | assert g.is_adjacent('v_3', 'v') is False 43 | 44 | assert raises(ValueError, lambda: g.add_edge('u', 'v')) 45 | assert raises(ValueError, lambda: g.add_edge('v', 'x')) 46 | 47 | v_4 = AdjacencyListGraphNode('v_4', 4, backend = Backend.CPP) 48 | v_5 = AdjacencyListGraphNode('v_5', 5, backend = Backend.CPP) 49 | g2 = Graph(v_4,v_5,implementation = 'adjacency_list', backend = Backend.CPP) 50 | v_6 = AdjacencyListGraphNode('v_6', 6, backend = Backend.CPP) 51 | assert raises(ValueError, lambda: g2.add_vertex(v_5)) 52 | g2.add_vertex(v_6) 53 | g2.add_edge('v_4', 'v_5') 54 | g2.add_edge('v_5', 'v_6') 55 | g2.add_edge('v_4', 'v_6') 56 | assert g2.is_adjacent('v_4', 'v_5') is True 57 | assert g2.is_adjacent('v_5', 'v_6') is True 58 | assert g2.is_adjacent('v_4', 'v_6') is True 59 | assert g2.is_adjacent('v_5', 'v_4') is False 60 | assert g2.is_adjacent('v_6', 'v_5') is False 61 | assert 
g2.is_adjacent('v_6', 'v_4') is False 62 | assert g2.num_edges() == 3 63 | assert g2.num_vertices() == 3 64 | neighbors = g2.neighbors('v_4') 65 | assert set(neighbors) == {v_6, v_5} 66 | v = AdjacencyListGraphNode('v', 4, backend = Backend.CPP) 67 | g2.add_vertex(v) 68 | g2.add_edge('v_4', 'v', 0) 69 | g2.add_edge('v_5', 'v', 0) 70 | g2.add_edge('v_6', 'v', "h") 71 | assert g2.is_adjacent('v_4', 'v') is True 72 | assert g2.is_adjacent('v_5', 'v') is True 73 | assert g2.is_adjacent('v_6', 'v') is True 74 | e1 = g2.get_edge('v_4', 'v') 75 | e2 = g2.get_edge('v_5', 'v') 76 | e3 = g2.get_edge('v_6', 'v') 77 | assert (str(e1)) == "('v_4', 'v', 0)" 78 | assert (str(e2)) == "('v_5', 'v', 0)" 79 | assert (str(e3)) == "('v_6', 'v', h)" 80 | g2.remove_edge('v_4', 'v') 81 | assert g2.is_adjacent('v_4', 'v') is False 82 | g2.remove_vertex('v') 83 | assert raises(ValueError, lambda: g2.add_edge('v_4', 'v')) 84 | -------------------------------------------------------------------------------- /pydatastructs/trees/tests/benchmarks/test_binary_trees.py: -------------------------------------------------------------------------------- 1 | import timeit, functools, os, pytest 2 | from pydatastructs.trees.binary_trees import (BinarySearchTree, RedBlackTree) 3 | from pydatastructs.utils.misc_util import Backend 4 | 5 | @pytest.mark.xfail 6 | def test_BinarySearchTree(**kwargs): 7 | cpp = Backend.CPP 8 | repeat = 1 9 | number = 1 10 | 11 | size = int(os.environ.get("PYDATASTRUCTS_BENCHMARK_SIZE", "1000")) 12 | size = kwargs.get("size", size) 13 | 14 | BST = BinarySearchTree 15 | b1 = BST(backend=Backend.PYTHON) 16 | b2 = BST(backend=Backend.CPP) 17 | 18 | def f(backend, tree): 19 | for node in range(-1000,1000): 20 | tree.insert(node, node) 21 | def g(backend, tree): 22 | for node in range(-1000, 1000): 23 | tree.search(node) 24 | def h(backend, tree): 25 | for node in range(-1000, 1000): 26 | tree.delete(node) 27 | 28 | kwds_dict_PY = {"backend": Backend.PYTHON, "tree":b1} 29 | 
kwds_dict_CPP = {"backend": Backend.CPP, "tree":b2} 30 | 31 | timer_python = timeit.Timer(functools.partial(f, **kwds_dict_PY)) 32 | python_insert = min(timer_python.repeat(repeat, number)) 33 | 34 | timer_cpp = timeit.Timer(functools.partial(f, **kwds_dict_CPP)) 35 | cpp_insert = min(timer_cpp.repeat(repeat, number)) 36 | assert cpp_insert < python_insert 37 | 38 | timer_python = timeit.Timer(functools.partial(g, **kwds_dict_PY)) 39 | python_search = min(timer_python.repeat(repeat, number)) 40 | 41 | timer_cpp = timeit.Timer(functools.partial(g, **kwds_dict_CPP)) 42 | cpp_search = min(timer_cpp.repeat(repeat, number)) 43 | assert cpp_search < python_search 44 | 45 | timer_python = timeit.Timer(functools.partial(h, **kwds_dict_PY)) 46 | python_delete = min(timer_python.repeat(repeat, number)) 47 | 48 | timer_cpp = timeit.Timer(functools.partial(h, **kwds_dict_CPP)) 49 | cpp_delete = min(timer_cpp.repeat(repeat, number)) 50 | assert cpp_delete < python_delete 51 | 52 | @pytest.mark.xfail 53 | def test_RedBlackTree(**kwargs): 54 | cpp = Backend.CPP 55 | repeat = 1 56 | number = 1 57 | 58 | size = int(os.environ.get("PYDATASTRUCTS_BENCHMARK_SIZE", "1000")) 59 | size = kwargs.get("size", size) 60 | 61 | RBT = RedBlackTree 62 | b1 = RBT(backend=Backend.PYTHON) 63 | b2 = RBT(backend=Backend.CPP) 64 | 65 | def f(backend, tree): 66 | for node in range(-1000,1000): 67 | tree.insert(node, node) 68 | 69 | def g(backend, tree): 70 | for node in range(-1000, 1000): 71 | tree.search(node) 72 | def h(backend, tree): 73 | for node in range(-1000, 1000): 74 | tree.delete(node) 75 | 76 | kwds_dict_PY = {"backend": Backend.PYTHON, "tree":b1} 77 | kwds_dict_CPP = {"backend": Backend.CPP, "tree":b2} 78 | 79 | timer_python = timeit.Timer(functools.partial(f, **kwds_dict_PY)) 80 | python_insert = min(timer_python.repeat(repeat, number)) 81 | 82 | timer_cpp = timeit.Timer(functools.partial(f, **kwds_dict_CPP)) 83 | cpp_insert = min(timer_cpp.repeat(repeat, number)) 84 | assert cpp_insert 
< python_insert 85 | 86 | timer_python = timeit.Timer(functools.partial(g, **kwds_dict_PY)) 87 | python_search = min(timer_python.repeat(repeat, number)) 88 | 89 | timer_cpp = timeit.Timer(functools.partial(g, **kwds_dict_CPP)) 90 | cpp_search = min(timer_cpp.repeat(repeat, number)) 91 | assert cpp_search < python_search 92 | 93 | timer_python = timeit.Timer(functools.partial(h, **kwds_dict_PY)) 94 | python_delete = min(timer_python.repeat(repeat, number)) 95 | 96 | timer_cpp = timeit.Timer(functools.partial(h, **kwds_dict_CPP)) 97 | cpp_delete = min(timer_cpp.repeat(repeat, number)) 98 | assert cpp_delete < python_delete 99 | -------------------------------------------------------------------------------- /pydatastructs/miscellaneous_data_structures/sparse_table.py: -------------------------------------------------------------------------------- 1 | from pydatastructs.linear_data_structures.arrays import OneDimensionalArray 2 | from pydatastructs.utils.misc_util import ( 3 | Backend, raise_if_backend_is_not_python) 4 | import math 5 | 6 | __all__ = ['SparseTable'] 7 | 8 | 9 | class SparseTable(object): 10 | """ 11 | Represents the sparse table data structure. 12 | 13 | Parameters 14 | ========== 15 | 16 | array: OneDimensionalArray 17 | The array to be used for filling the sparse table. 18 | func: callable 19 | The function to be used for filling the sparse table. 20 | It should accept only one tuple as an argument. The 21 | size of the tuple will be either 1 or 2 and any one 22 | of the elements can be `None`. You can treat `None` in 23 | whatever way you want. For example, in case of minimum 24 | values, `None` can be treated as infinity. We provide 25 | the following which can be used as an argument value for this 26 | parameter, 27 | 28 | `minimum` - For range minimum queries. 29 | 30 | `greatest_common_divisor` - For queries finding greatest 31 | common divisor of a range. 32 | 33 | `summation` - For range sum queries. 
34 | backend: pydatastructs.Backend 35 | The backend to be used. 36 | Optional, by default, the best available 37 | backend is used. 38 | 39 | Examples 40 | ======== 41 | 42 | >>> from pydatastructs import SparseTable, minimum 43 | >>> from pydatastructs import OneDimensionalArray 44 | >>> arr = OneDimensionalArray(int, [1, 2, 3, 4, 5]) 45 | >>> s_t = SparseTable(arr, minimum) 46 | >>> str(s_t) 47 | "['[1, 1, 1]', '[2, 2, 2]', '[3, 3, None]', '[4, 4, None]', '[5, None, None]']" 48 | 49 | References 50 | ========== 51 | 52 | .. [1] https://cp-algorithms.com/data_structures/sparse-table.html 53 | """ 54 | 55 | __slots__ = ['_table', 'func'] 56 | 57 | def __new__(cls, array, func, **kwargs): 58 | raise_if_backend_is_not_python( 59 | cls, kwargs.get('backend', Backend.PYTHON)) 60 | 61 | # TODO: If possible remove the following check. 62 | if len(array) == 0: 63 | raise ValueError("Input %s array is empty."%(array)) 64 | 65 | obj = object.__new__(cls) 66 | size = len(array) 67 | log_size = int(math.log2(size)) + 1 68 | obj._table = [OneDimensionalArray(int, log_size) for _ in range(size)] 69 | obj.func = func 70 | 71 | for i in range(size): 72 | obj._table[i][0] = func((array[i],)) 73 | 74 | for j in range(1, log_size + 1): 75 | for i in range(size - (1 << j) + 1): 76 | obj._table[i][j] = func((obj._table[i][j - 1], 77 | obj._table[i + (1 << (j - 1))][j - 1])) 78 | 79 | return obj 80 | 81 | @classmethod 82 | def methods(cls): 83 | return ['query', '__str__'] 84 | 85 | def query(self, start, end): 86 | """ 87 | Method to perform a query on sparse table in [start, end) 88 | range. 89 | 90 | Parameters 91 | ========== 92 | 93 | start: int 94 | The starting index of the range. 95 | end: int 96 | The ending index of the range. 
# [packed-dump fragment — tail of pydatastructs/miscellaneous_data_structures/sparse_table.py,
#  whose head lies on earlier dump lines; preserved here as text]
# 97  |         """
# 98  |         j = int(math.log2(end - start + 1)) + 1
# 99  |         answer = None
# 100 |         while j >= 0:
# 101 |             if start + (1 << j) - 1 <= end:
# 102 |                 answer = self.func((answer, self._table[start][j]))
# 103 |                 start += 1 << j
# 104 |             j -= 1
# 105 |         return answer
# 107 |     def __str__(self):
# 108 |         return str([str(array) for array in self._table])

# ---- pydatastructs/graphs/adjacency_matrix.py ----

from pydatastructs.graphs.graph import Graph
from pydatastructs.graphs._backend.cpp import _graph
from pydatastructs.utils.misc_util import (
    GraphEdge, raise_if_backend_is_not_python,
    Backend)

__all__ = [
    'AdjacencyMatrix'
]

class AdjacencyMatrix(Graph):
    """
    Adjacency matrix implementation of graphs.

    Edge presence is stored as ``self.matrix[source][target]`` boolean
    flags; edge weights live separately in ``self.edge_weights`` keyed
    by ``"source_target"``.

    See also
    ========

    pydatastructs.graphs.graph.Graph
    """
    def __new__(cls, *vertices, **kwargs):
        backend = kwargs.get('backend', Backend.PYTHON)
        if backend == Backend.PYTHON:
            obj = object.__new__(cls)
            obj.vertices = [vertex.name for vertex in vertices]
            # Each vertex is also exposed as an attribute named after it.
            for vertex in vertices:
                obj.__setattr__(vertex.name, vertex)
            obj.matrix = {}
            for vertex in vertices:
                obj.matrix[vertex.name] = {}
            obj.edge_weights = {}
            obj._impl = 'adjacency_matrix'
            return obj
        else:
            # The C++ backend builds and returns its own graph object.
            return _graph.AdjacencyMatrixGraph(vertices)

    @classmethod
    def methods(self):
        return ['is_adjacent', 'neighbors',
                'add_edge', 'get_edge', 'remove_edge',
                '__new__']

    def is_adjacent(self, node1, node2):
        """Return True iff a directed edge node1 -> node2 is present."""
        node1, node2 = str(node1), str(node2)
        row = self.matrix.get(node1, {})
        # ``remove_edge`` stores ``False`` rather than deleting the key,
        # so compare against ``False`` explicitly.
        return row.get(node2, False) is not False

    def num_vertices(self):
        """Return the number of vertices in the graph."""
        return len(self.vertices)

    def num_edges(self):
        """Return the number of edges currently present.

        BUGFIX: ``remove_edge`` leaves a ``False`` entry in the matrix
        instead of deleting the key, so counting keys (the previous
        ``sum(len(v) ...)``) over-counted removed edges. Count only
        truthy presence flags.
        """
        return sum(1 for row in self.matrix.values()
                   for present in row.values() if present)

    def neighbors(self, node):
        """Return the vertex objects that ``node`` has an edge to."""
        source = str(node)
        row = self.matrix.get(source, {})
        # Avoid shadowing the parameter while scanning the row.
        return [self.__getattribute__(str(target))
                for target, present in row.items() if present]

    def add_vertex(self, node):
        # The matrix representation here is static: vertices are fixed
        # at construction time.
        raise NotImplementedError("Currently we allow "
            "adjacency matrix for static graphs only")

    def remove_vertex(self, node):
        raise NotImplementedError("Currently we allow "
            "adjacency matrix for static graphs only.")

    def add_edge(self, source, target, cost=None):
        """Add a directed edge source -> target, optionally weighted by ``cost``.

        Raises
        ======

        ValueError
            If either endpoint is not already a vertex of the graph.
        """
        source, target = str(source), str(target)
        # BUGFIX: the concatenated message segments previously lacked
        # separating spaces ("graph.Call", "newvertex", ...).
        error_msg = ("Vertex %s is not present in the graph. "
                     "Call Graph.add_vertex to add a new "
                     "vertex. Graph.add_edge is only responsible "
                     "for adding edges and it will not add new "
                     "vertices on its own. This is done to maintain "
                     "clear separation between the functionality of "
                     "these two methods.")
        if source not in self.matrix:
            raise ValueError(error_msg % (source))
        if target not in self.matrix:
            raise ValueError(error_msg % (target))

        self.matrix[source][target] = True
        if cost is not None:
            self.edge_weights[source + "_" + target] = \
                GraphEdge(self.__getattribute__(source),
                          self.__getattribute__(target),
                          cost)

    def get_edge(self, source, target):
        """Return the GraphEdge for source -> target, or None if unweighted/absent."""
        return self.edge_weights.get(
            str(source) + "_" + str(target),
            None)

    def remove_edge(self, source, target):
        """Remove the edge source -> target and drop its weight, if any."""
        source, target = str(source), str(target)
        # Presence flag is cleared, not deleted; is_adjacent/num_edges
        # both treat a ``False`` entry as absent.
        self.matrix[source][target] = False
        self.edge_weights.pop(str(source) + "_" + str(target), None)

# ---- pydatastructs/graphs/adjacency_list.py ----
# [packed-dump fragment — head of the file; it continues on the next dump
#  line, so it is preserved here as text]
# 1 | from pydatastructs.graphs.graph import Graph
# 2 | from pydatastructs.graphs._backend.cpp import _graph
# 3 | from      (continues: pydatastructs.utils.misc_util import (...))
pydatastructs.utils.misc_util import ( 4 | GraphEdge, Backend, raise_if_backend_is_not_python) 5 | 6 | __all__ = [ 7 | 'AdjacencyList' 8 | ] 9 | 10 | class AdjacencyList(Graph): 11 | """ 12 | Adjacency list implementation of graphs. 13 | 14 | See also 15 | ======== 16 | 17 | pydatastructs.graphs.graph.Graph 18 | """ 19 | def __new__(cls, *vertices, **kwargs): 20 | 21 | backend = kwargs.get('backend', Backend.PYTHON) 22 | if backend == Backend.PYTHON: 23 | obj = object.__new__(cls) 24 | for vertex in vertices: 25 | obj.__setattr__(vertex.name, vertex) 26 | obj.vertices = [vertex.name for vertex in vertices] 27 | obj.edge_weights = {} 28 | obj._impl = 'adjacency_list' 29 | return obj 30 | else: 31 | graph = _graph.AdjacencyListGraph() 32 | for vertice in vertices: 33 | graph.add_vertex(vertice) 34 | return graph 35 | 36 | @classmethod 37 | def methods(self): 38 | return ['is_adjacent', 'neighbors', 39 | 'add_vertex', 'remove_vertex', 'add_edge', 40 | 'get_edge', 'remove_edge', '__new__'] 41 | 42 | def is_adjacent(self, node1, node2): 43 | node1 = self.__getattribute__(node1) 44 | return hasattr(node1, node2) 45 | 46 | def num_vertices(self): 47 | return len(self.vertices) 48 | 49 | def num_edges(self): 50 | return sum(len(self.neighbors(v)) for v in self.vertices) 51 | 52 | def neighbors(self, node): 53 | node = self.__getattribute__(node) 54 | return [self.__getattribute__(name) for name in node.adjacent] 55 | 56 | def add_vertex(self, node): 57 | if not hasattr(self, node.name): 58 | self.vertices.append(node.name) 59 | self.__setattr__(node.name, node) 60 | 61 | def remove_vertex(self, name): 62 | delattr(self, name) 63 | self.vertices.remove(name) 64 | for node in self.vertices: 65 | node_obj = self.__getattribute__(node) 66 | if hasattr(node_obj, name): 67 | delattr(node_obj, name) 68 | node_obj.adjacent.remove(name) 69 | 70 | def add_edge(self, source, target, cost=None): 71 | source, target = str(source), str(target) 72 | error_msg = ("Vertex %s is not present 
in the graph." 73 | "Call Graph.add_vertex to add a new" 74 | "vertex. Graph.add_edge is only responsible" 75 | "for adding edges and it will not add new" 76 | "vertices on its own. This is done to maintain" 77 | "clear separation between the functionality of" 78 | "these two methods.") 79 | if not hasattr(self, source): 80 | raise ValueError(error_msg % (source)) 81 | if not hasattr(self, target): 82 | raise ValueError(error_msg % (target)) 83 | 84 | source, target = self.__getattribute__(source), \ 85 | self.__getattribute__(target) 86 | source.add_adjacent_node(target.name) 87 | if cost is not None: 88 | self.edge_weights[source.name + "_" + target.name] = \ 89 | GraphEdge(source, target, cost) 90 | 91 | def get_edge(self, source, target): 92 | return self.edge_weights.get( 93 | source + "_" + target, 94 | None) 95 | 96 | def remove_edge(self, source, target): 97 | source, target = self.__getattribute__(source), \ 98 | self.__getattribute__(target) 99 | source.remove_adjacent_node(target.name) 100 | self.edge_weights.pop(source.name + "_" + target.name, 101 | None) 102 | -------------------------------------------------------------------------------- /pydatastructs/linear_data_structures/tests/benchmarks/test_algorithms.py: -------------------------------------------------------------------------------- 1 | import random, timeit, functools, os, pytest 2 | from pydatastructs import (OneDimensionalArray, Backend, 3 | DynamicOneDimensionalArray, quick_sort, bubble_sort, selection_sort, 4 | insertion_sort, is_ordered, linear_search, binary_search, jump_search) 5 | 6 | def _test_common_sort(sort, **kwargs): 7 | cpp = Backend.CPP 8 | repeat = 2 9 | number = 2 10 | 11 | size = int(os.environ.get("PYDATASTRUCTS_BENCHMARK_SIZE", "1000")) 12 | size = kwargs.get("size", size) 13 | 14 | def _common(array_type, dtype, *args, **kwargs): 15 | array = array_type(dtype, *args, **kwargs) 16 | 17 | timer_python = timeit.Timer(functools.partial(sort, array)) 18 | python_backend = 
@pytest.mark.xfail
def test_is_ordered():
    """Benchmark: the C++ is_ordered backend should beat Python's."""
    cpp = Backend.CPP
    repeat, number = 2, 2

    size = int(os.environ.get("PYDATASTRUCTS_BENCHMARK_SIZE", "4000"))

    def _benchmark(array_type, dtype, *args, **kwargs):
        arr = array_type(dtype, *args, **kwargs)

        py_time = min(timeit.Timer(
            functools.partial(is_ordered, arr)).repeat(repeat, number))
        cpp_time = min(timeit.Timer(
            functools.partial(is_ordered, arr,
                              backend=cpp)).repeat(repeat, number))

        assert cpp_time < py_time

    # Case 1: int
    int_data = [random.randint(0, 2 * size) for _ in range(size)]
    _benchmark(OneDimensionalArray, int, int_data, backend=cpp)

    # Case 3: float
    float_data = [random.random() * 2 * size for _ in range(size)]
    _benchmark(OneDimensionalArray, float, float_data, backend=cpp)
@pytest.mark.xfail
def test_search():
    """Benchmark: each C++ search backend should beat its Python twin."""
    cpp = Backend.CPP
    repeat, number = 2, 2

    size = int(os.environ.get("PYDATASTRUCTS_BENCHMARK_SIZE", "4000"))

    def _benchmark(search_func, array_type, dtype, *args, **kwargs):
        arr = array_type(dtype, *args, **kwargs)
        needle = arr[size - 1]

        py_time = min(timeit.Timer(
            functools.partial(search_func, arr, needle)).repeat(repeat, number))
        cpp_time = min(timeit.Timer(
            functools.partial(search_func, arr, needle,
                              backend=cpp)).repeat(repeat, number))

        assert cpp_time < py_time

    # Case 1: int
    data = [random.randint(0, 2 * size) for _ in range(size)]
    for algorithm in (linear_search, binary_search, jump_search):
        _benchmark(algorithm, OneDimensionalArray, int, data, backend=cpp)
*args, PyObject *kwds) { 31 | TreeNode *self; 32 | self = reinterpret_cast(type->tp_alloc(type, 0)); 33 | self->type_tag = NodeType_::TreeNode; 34 | 35 | // Assume that arguments are in the order below. Python code is such that this is true. 36 | self->key = PyObject_GetItem(args, PyZero); 37 | self->data = PyObject_GetItem(args, PyOne); 38 | 39 | Py_INCREF(Py_None); 40 | self->left = Py_None; 41 | Py_INCREF(Py_None); 42 | self->right = Py_None; 43 | Py_INCREF(Py_None); 44 | self->parent = Py_None; 45 | self->height = 0; 46 | self->size = 1; 47 | self->is_root = false; 48 | self->color = 1; 49 | self->isCartesianTreeNode = false; 50 | 51 | return reinterpret_cast(self); 52 | } 53 | 54 | static PyObject* TreeNode___str__(TreeNode *self) { 55 | PyObject* out = Py_BuildValue("(OOOO)", self->left, self->key, self->data, self->right); 56 | Py_INCREF(out); 57 | return PyObject_Str(out); 58 | } 59 | 60 | static struct PyMemberDef TreeNode_PyMemberDef[] = { 61 | {"type_tag", T_INT, offsetof(TreeNode, type_tag), 0, "TreeNode type_tag"}, 62 | {"key", T_OBJECT, offsetof(TreeNode, key), 0, "TreeNode key"}, 63 | {"data", T_OBJECT, offsetof(TreeNode, data), 0, "TreeNode data"}, 64 | {"height", T_LONG, offsetof(TreeNode, height), 0, "TreeNode height"}, 65 | {"size", T_LONG, offsetof(TreeNode, size), 0, "TreeNode size"}, 66 | {"is_root", T_BOOL, offsetof(TreeNode, is_root), 0, "TreeNode is_root"}, 67 | {"left", T_OBJECT, offsetof(TreeNode, left), 0, "TreeNode left"}, 68 | {"right", T_OBJECT, offsetof(TreeNode, right), 0, "TreeNode right"}, 69 | {"parent", T_OBJECT, offsetof(TreeNode, parent), 0, "TreeNode parent"}, 70 | {"color", T_LONG, offsetof(TreeNode, size), 0, "RedBlackTreeNode color"}, 71 | {"priority", T_DOUBLE, offsetof(TreeNode, priority), 0, "CartesianTreeNode's priority"}, 72 | {NULL}, 73 | }; 74 | 75 | 76 | static PyTypeObject TreeNodeType = { 77 | /* tp_name */ PyVarObject_HEAD_INIT(NULL, 0) "TreeNode", 78 | /* tp_basicsize */ sizeof(TreeNode), 79 | /* tp_itemsize */ 
0, 80 | /* tp_dealloc */ (destructor) TreeNode_dealloc, 81 | /* tp_print */ 0, 82 | /* tp_getattr */ 0, 83 | /* tp_setattr */ 0, 84 | /* tp_reserved */ 0, 85 | /* tp_repr */ 0, 86 | /* tp_as_number */ 0, 87 | /* tp_as_sequence */ 0, 88 | /* tp_as_mapping */ 0, 89 | /* tp_hash */ 0, 90 | /* tp_call */ 0, 91 | /* tp_str */ (reprfunc) TreeNode___str__, 92 | /* tp_getattro */ 0, 93 | /* tp_setattro */ 0, 94 | /* tp_as_buffer */ 0, 95 | /* tp_flags */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, 96 | /* tp_doc */ 0, 97 | /* tp_traverse */ 0, 98 | /* tp_clear */ 0, 99 | /* tp_richcompare */ 0, 100 | /* tp_weaklistoffset */ 0, 101 | /* tp_iter */ 0, 102 | /* tp_iternext */ 0, 103 | /* tp_methods */ 0, 104 | /* tp_members */ TreeNode_PyMemberDef, 105 | /* tp_getset */ 0, 106 | /* tp_base */ &NodeType, // Class Node is the base class 107 | /* tp_dict */ 0, 108 | /* tp_descr_get */ 0, 109 | /* tp_descr_set */ 0, 110 | /* tp_dictoffset */ 0, 111 | /* tp_init */ 0, 112 | /* tp_alloc */ 0, 113 | /* tp_new */ TreeNode___new__, 114 | }; 115 | 116 | #endif 117 | -------------------------------------------------------------------------------- /docs/source/contributing.rst: -------------------------------------------------------------------------------- 1 | How to contribute? 2 | ================== 3 | 4 | Follow the steps given below, 5 | 6 | 1. Fork, https://github.com/codezonediitj/pydatastructs/ 7 | 2. Execute, ``git clone https://github.com/codezonediitj/pydatastructs/`` 8 | 3. Change your working directory to ``../pydatastructs``. 9 | 4. Execute, ``git remote add origin_user https://github.com//pydatastructs/`` 10 | 5. Execute, ``git checkout -b ``. 11 | 6. Make changes to the code. 12 | 7. Add your name and email to the ``AUTHORS``, if you wish to. 13 | 8. Execute, ``git add .``. 14 | 9. Execute, ``git commit -m "your-commit-message"``. 15 | 10. Execute, ``git push origin_user ``. 16 | 11. Make PR. 17 | 18 | That's it, 10 easy steps for your first contribution. 
For 19 | future contributions just follow steps 5 to 10. Make sure that 20 | before starting work, always checkout to ``master`` and pull the 21 | recent changes using the remote ``origin`` and then start from steps 22 | 5 to 10. 23 | 24 | See you soon with your first PR. 25 | 26 | It is recommended to go through the following links before you start working. 27 | 28 | - `Issue Policy `_ 29 | - `Pull Request Policy `_ 30 | - `Plan of Action for the Projects `_ 31 | 32 | Testing 33 | ------- 34 | 35 | For testing your patch locally follow the steps given below, 36 | 37 | 1. Install `pytest-cov `_. Skip this step if you are already having the package. 38 | 2. Run, ``python3 -m pytest --doctest-modules --cov=./ --cov-report=html``. Look for, ``htmlcov/index.html`` and open it 39 | in your browser, which will show the coverage report. Try to ensure that the coverage is not decreasing by more than 1% 40 | for your patch. 41 | 42 | For a good visualisation of the different data structures and algorithms, refer the following websites: 43 | 44 | - https://visualgo.net/ 45 | 46 | - https://www.cs.usfca.edu/~galles/visualization/ 47 | 48 | You can use the examples given in the following book as tests for your code: 49 | 50 | - `https://opendatastructures.org/ods-python.pdf `_ 51 | 52 | 53 | Guidelines 54 | ---------- 55 | 56 | We recommend you to join our `gitter channel `_ for discussing anything related to the project. 57 | 58 | Please follow the rules and guidelines given below, 59 | 60 | 1. Follow the `numpydoc docstring guide `_. 61 | 2. If you are planning to contribute a new data structure then first raise an **issue** for discussing the API, rather than directly making a PR. Please go through `Plan of Action for Adding New Data Structures `_. 62 | 3. For the first-time contributors we recommend not to take a complex data structure, rather start with ``beginner`` or ``easy``. 63 | 4. We don't assign issues to any individual. 
Instead, we follow First Come First Serve for taking over issues, i.e., if one contributor has already shown interest then no comment should be made after that as it won't be considered. Anyone willing to work on an issue can comment on the thread that he/she is working on and raise a PR for the same. 64 | 5. Any open PR must be provided with some updates after being reviewed. If it is stalled for more than 4 days, it will be labeled as ``Please take over``, meaning that anyone willing to continue that PR can start working on it. 65 | 6. PRs that are not related to the project or don't follow any guidelines will be labeled as ``Could Close``, meaning that the PR is not necessary at the moment. 66 | 67 | The following parameters are to be followed to pass the code quality tests for your Pull Requests, 68 | 69 | 1. There should not be any trailing white spaces at any line of code. 70 | 2. Each ``.py`` file should end with exactly one new line. 71 | 3. Comparisons involving ``True``, ``False`` and ``None`` should be done by 72 | reference (using ``is``, ``is not``) and not by value(``==``, ``!=``). 73 | 74 | Keep contributing!! 
def test_ArrayQueue():
    """Exercise FIFO and double-ended behaviour of the array queue."""
    empty_q = Queue()
    raises(IndexError, lambda: empty_q.popleft())

    q = Queue(implementation='array', items=[0])
    for item in (1, 2, 3):
        q.append(item)
    assert str(q) == '[0, 1, 2, 3]'
    assert len(q) == 4
    assert q.popleft() == 0
    assert q.popleft() == 1
    assert len(q) == 2
    assert q.popleft() == 2
    assert q.popleft() == 3
    assert len(q) == 0

    dq = Queue(implementation='array', items=[0], double_ended=True)
    dq.append(1)
    dq.append(2)
    dq.appendleft(3)
    assert str(dq) == '[3, 0, 1, 2]'
    assert len(dq) == 4
    assert dq.popleft() == 3
    assert dq.pop() == 2
    assert len(dq) == 2
    assert dq.popleft() == 0
    assert dq.pop() == 1
    assert len(dq) == 0

    # appendleft is only available on double-ended queues.
    single_ended = Queue(implementation='array', items=[0])
    assert raises(NotImplementedError, lambda: single_ended.appendleft(2))
def test_PriorityQueue():
    """PriorityQueue dispatch and rejection of unknown implementations."""
    pq1 = PriorityQueue(implementation='linked_list')
    assert _check_type(pq1, LinkedListPriorityQueue) is True
    # BUG FIX: this previously constructed Queue, so PriorityQueue's own
    # NotImplementedError path was never exercised by this test.
    assert raises(NotImplementedError, lambda: PriorityQueue(implementation=''))
| assert pq1.peek.data == 2 112 | assert pq1.pop() == 2 113 | assert pq1.peek.data == 1 114 | assert pq1.pop() == 1 115 | assert pq1.is_empty is True 116 | assert raises(IndexError, lambda: pq1.peek) 117 | -------------------------------------------------------------------------------- /pydatastructs/trees/_backend/cpp/Treap.hpp: -------------------------------------------------------------------------------- 1 | #ifndef TREES_TREAP_HPP 2 | #define TREES_TREAP_HPP 3 | 4 | #define PY_SSIZE_T_CLEAN 5 | #include 6 | #include 7 | #include 8 | #include "../../../utils/_backend/cpp/utils.hpp" 9 | #include "../../../utils/_backend/cpp/TreeNode.hpp" 10 | #include "../../../linear_data_structures/_backend/cpp/arrays/ArrayForTrees.hpp" 11 | #include "../../../linear_data_structures/_backend/cpp/arrays/DynamicOneDimensionalArray.hpp" 12 | #include "BinarySearchTree.hpp" 13 | #include "SelfBalancingBinaryTree.hpp" 14 | #include "CartesianTree.hpp" 15 | 16 | typedef struct { 17 | PyObject_HEAD 18 | CartesianTree* ct; 19 | ArrayForTrees* tree; 20 | } Treap; 21 | 22 | static void Treap_dealloc(Treap *self) { 23 | Py_TYPE(self)->tp_free(reinterpret_cast(self)); 24 | } 25 | 26 | static PyObject* Treap___new__(PyTypeObject* type, PyObject *args, PyObject *kwds) { 27 | Treap *self; 28 | self = reinterpret_cast(type->tp_alloc(type, 0)); 29 | 30 | if (PyType_Ready(&CartesianTreeType) < 0) { // This has to be present to finalize a type object. This should be called on all type objects to finish their initialization. 
// Insert `key` (with optional `data`) into the treap. The heap priority
// is drawn uniformly at random from [0, 1], which keeps the underlying
// Cartesian tree balanced in expectation. Returns whatever
// CartesianTree_insert returns (NULL on argument-parsing failure).
static PyObject* Treap_insert(Treap *self, PyObject* args) {
    // Defaults: both slots start as None so a missing `data` stays None.
    Py_INCREF(Py_None);
    PyObject* key = Py_None;
    Py_INCREF(Py_None);
    PyObject* data = Py_None;
    if (!PyArg_ParseTuple(args, "O|O", &key, &data)) { // data is optional
        return NULL;
    }
    // NOTE(review): rand() is never seeded in this header, so priorities
    // repeat across runs -- presumably acceptable; confirm intended.
    PyObject* priority = PyFloat_FromDouble(((double) rand() / (RAND_MAX)));

    return CartesianTree_insert(self->ct, Py_BuildValue("(OOO)", key, priority, data));
}
PyVarObject_HEAD_INIT(NULL, 0) "Treap", 90 | /* tp_basicsize */ sizeof(Treap), 91 | /* tp_itemsize */ 0, 92 | /* tp_dealloc */ (destructor) Treap_dealloc, 93 | /* tp_print */ 0, 94 | /* tp_getattr */ 0, 95 | /* tp_setattr */ 0, 96 | /* tp_reserved */ 0, 97 | /* tp_repr */ 0, 98 | /* tp_as_number */ 0, 99 | /* tp_as_sequence */ 0, 100 | /* tp_as_mapping */ 0, 101 | /* tp_hash */ 0, 102 | /* tp_call */ 0, 103 | /* tp_str */ (reprfunc) Treap___str__, 104 | /* tp_getattro */ 0, 105 | /* tp_setattro */ 0, 106 | /* tp_as_buffer */ 0, 107 | /* tp_flags */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, 108 | /* tp_doc */ 0, 109 | /* tp_traverse */ 0, 110 | /* tp_clear */ 0, 111 | /* tp_richcompare */ 0, 112 | /* tp_weaklistoffset */ 0, 113 | /* tp_iter */ 0, 114 | /* tp_iternext */ 0, 115 | /* tp_methods */ Treap_PyMethodDef, 116 | /* tp_members */ Treap_PyMemberDef, 117 | /* tp_getset */ Treap_GetterSetters, 118 | /* tp_base */ &CartesianTreeType, 119 | /* tp_dict */ 0, 120 | /* tp_descr_get */ 0, 121 | /* tp_descr_set */ 0, 122 | /* tp_dictoffset */ 0, 123 | /* tp_init */ 0, 124 | /* tp_alloc */ 0, 125 | /* tp_new */ Treap___new__, 126 | }; 127 | 128 | #endif 129 | -------------------------------------------------------------------------------- /pydatastructs/linear_data_structures/_backend/cpp/algorithms/quick_sort.hpp: -------------------------------------------------------------------------------- 1 | #ifndef LINEAR_DATA_STRUCTURES_ALGORITHMS_QUICK_SORT_HPP 2 | #define LINEAR_DATA_STRUCTURES_ALGORITHMS_QUICK_SORT_HPP 3 | 4 | #define PY_SSIZE_T_CLEAN 5 | #include 6 | #include "../arrays/OneDimensionalArray.hpp" 7 | #include "../arrays/DynamicOneDimensionalArray.hpp" 8 | #include "../../../../utils/_backend/cpp/utils.hpp" 9 | #include 10 | 11 | static PyObject* call_pick_pivot_element(PyObject* pick_pivot_element, 12 | size_t low, size_t high, PyObject* array) { 13 | PyObject* high_PyObject = PyLong_FromSize_t(high); 14 | if ( pick_pivot_element ) { 15 | return 
PyObject_CallFunctionObjArgs(pick_pivot_element, 16 | PyLong_FromSize_t(low), 17 | high_PyObject, 18 | array); 19 | } 20 | 21 | return PyObject_GetItem(array, high_PyObject); 22 | } 23 | 24 | static size_t quick_sort_partition(size_t low, size_t high, 25 | PyObject* pick_pivot_element, PyObject* comp, PyObject* array) { 26 | int64_t i = low - 1; 27 | PyObject* x = call_pick_pivot_element(pick_pivot_element, low, high, array); 28 | for( size_t j = low; j < high; j++ ) { 29 | PyObject* j_PyObject = PyLong_FromSize_t(j); 30 | if ( _comp(PyObject_GetItem(array, j_PyObject), x, comp) == 1 ) { 31 | i = i + 1; 32 | PyObject* i_PyObject = PyLong_FromLongLong(i); 33 | PyObject* tmp = PyObject_GetItem(array, i_PyObject); 34 | PyObject_SetItem(array, i_PyObject, 35 | PyObject_GetItem(array, j_PyObject)); 36 | PyObject_SetItem(array, j_PyObject, tmp); 37 | } 38 | } 39 | PyObject* i_PyObject = PyLong_FromLongLong(i + 1); 40 | PyObject* high_PyObject = PyLong_FromSize_t(high); 41 | PyObject* tmp = PyObject_GetItem(array, i_PyObject); 42 | PyObject_SetItem(array, i_PyObject, 43 | PyObject_GetItem(array, high_PyObject)); 44 | PyObject_SetItem(array, high_PyObject, tmp); 45 | return i + 1; 46 | } 47 | 48 | static PyObject* quick_sort_impl(PyObject* array, size_t lower, size_t upper, 49 | PyObject* comp, PyObject* pick_pivot_element) { 50 | 51 | size_t low, high, p; 52 | std::stack rstack; 53 | 54 | rstack.push(lower); 55 | rstack.push(upper); 56 | 57 | while ( !rstack.empty() ) { 58 | high = rstack.top(); 59 | rstack.pop(); 60 | low = rstack.top(); 61 | rstack.pop(); 62 | p = quick_sort_partition(low, high, pick_pivot_element, 63 | comp, array); 64 | if ( p - 1 > low ) { 65 | rstack.push(low); 66 | rstack.push(p - 1); 67 | } 68 | if ( p + 1 < high ) { 69 | rstack.push(p + 1); 70 | rstack.push(high); 71 | } 72 | } 73 | 74 | return array; 75 | } 76 | 77 | static PyObject* quick_sort(PyObject* self, PyObject* args, PyObject* kwds) { 78 | PyObject *args0 = NULL, *start = NULL, *end = 
NULL; 79 | PyObject *comp = NULL, *pick_pivot_element = NULL; 80 | size_t lower, upper; 81 | args0 = PyObject_GetItem(args, PyZero); 82 | int is_DynamicOneDimensionalArray = _check_type(args0, &DynamicOneDimensionalArrayType); 83 | int is_OneDimensionalArray = _check_type(args0, &OneDimensionalArrayType); 84 | if ( !is_DynamicOneDimensionalArray && !is_OneDimensionalArray ) { 85 | raise_exception_if_not_array(args0); 86 | return NULL; 87 | } 88 | comp = PyObject_GetItem(kwds, PyUnicode_FromString("comp")); 89 | if ( comp == NULL ) { 90 | PyErr_Clear(); 91 | } 92 | pick_pivot_element = PyObject_GetItem(kwds, PyUnicode_FromString("pick_pivot_element")); 93 | if ( pick_pivot_element == NULL ) { 94 | PyErr_Clear(); 95 | } 96 | start = PyObject_GetItem(kwds, PyUnicode_FromString("start")); 97 | if ( start == NULL ) { 98 | PyErr_Clear(); 99 | lower = 0; 100 | } else { 101 | lower = PyLong_AsSize_t(start); 102 | } 103 | end = PyObject_GetItem(kwds, PyUnicode_FromString("end")); 104 | if ( end == NULL ) { 105 | PyErr_Clear(); 106 | upper = PyObject_Length(args0) - 1; 107 | } else { 108 | upper = PyLong_AsSize_t(end); 109 | } 110 | 111 | args0 = quick_sort_impl(args0, lower, upper, comp, pick_pivot_element); 112 | if ( is_DynamicOneDimensionalArray ) { 113 | PyObject_CallMethod(args0, "_modify", "O", Py_True); 114 | } 115 | Py_INCREF(args0); 116 | return args0; 117 | } 118 | 119 | #endif 120 | -------------------------------------------------------------------------------- /pydatastructs/miscellaneous_data_structures/disjoint_set.py: -------------------------------------------------------------------------------- 1 | from pydatastructs.utils import Set 2 | from pydatastructs.utils.misc_util import ( 3 | Backend, raise_if_backend_is_not_python) 4 | 5 | __all__ = ['DisjointSetForest'] 6 | 7 | class DisjointSetForest(object): 8 | """ 9 | Represents a forest of disjoint set trees. 
10 | 11 | Parameters 12 | ========== 13 | 14 | backend: pydatastructs.Backend 15 | The backend to be used. 16 | Optional, by default, the best available 17 | backend is used. 18 | 19 | Examples 20 | ======== 21 | 22 | >>> from pydatastructs import DisjointSetForest 23 | >>> dst = DisjointSetForest() 24 | >>> dst.make_set(1) 25 | >>> dst.make_set(2) 26 | >>> dst.union(1, 2) 27 | >>> dst.find_root(2).key 28 | 1 29 | >>> dst.make_root(2) 30 | >>> dst.find_root(2).key 31 | 2 32 | 33 | References 34 | ========== 35 | 36 | .. [1] https://en.wikipedia.org/wiki/Disjoint-set_data_structure 37 | """ 38 | 39 | __slots__ = ['tree'] 40 | 41 | def __new__(cls, **kwargs): 42 | raise_if_backend_is_not_python( 43 | cls, kwargs.get('backend', Backend.PYTHON)) 44 | obj = object.__new__(cls) 45 | obj.tree = dict() 46 | return obj 47 | 48 | @classmethod 49 | def methods(cls): 50 | return ['make_set', '__new__', 'find_root', 'union'] 51 | 52 | def make_set(self, key, data=None): 53 | """ 54 | Adds a singleton set to the tree 55 | of disjoint sets with given key 56 | and optionally data. 57 | """ 58 | if self.tree.get(key, None) is None: 59 | new_set = Set(key, data) 60 | self.tree[key] = new_set 61 | new_set.parent = new_set 62 | new_set.size = 1 63 | 64 | def find_root(self, key): 65 | """ 66 | Finds the root of the set 67 | with the given key by path 68 | splitting algorithm. 69 | """ 70 | if self.tree.get(key, None) is None: 71 | raise KeyError("Invalid key, %s"%(key)) 72 | _set = self.tree[key] 73 | while _set.parent is not _set: 74 | _set, _set.parent = _set.parent, _set.parent.parent 75 | return _set 76 | 77 | def union(self, key1, key2): 78 | """ 79 | Takes the union of the two 80 | disjoint set trees with given 81 | keys. The union is done by size. 
82 | """ 83 | x_root = self.find_root(key1) 84 | y_root = self.find_root(key2) 85 | 86 | if x_root is not y_root: 87 | if x_root.size < y_root.size: 88 | x_root, y_root = y_root, x_root 89 | 90 | y_root.parent = x_root 91 | x_root.size += y_root.size 92 | 93 | def make_root(self, key): 94 | """ 95 | Finds the set to which the key belongs 96 | and makes it as the root of the set. 97 | """ 98 | if self.tree.get(key, None) is None: 99 | raise KeyError("Invalid key, %s"%(key)) 100 | 101 | key_set = self.tree[key] 102 | if key_set.parent is not key_set: 103 | current_parent = key_set.parent 104 | # Remove this key subtree size from all its ancestors 105 | while current_parent.parent is not current_parent: 106 | current_parent.size -= key_set.size 107 | current_parent = current_parent.parent 108 | 109 | all_set_size = current_parent.size # This is the root node 110 | current_parent.size -= key_set.size 111 | 112 | # Make parent of current root as key 113 | current_parent.parent = key_set 114 | # size of new root will be same as previous root's size 115 | key_set.size = all_set_size 116 | # Make parent of key as itself 117 | key_set.parent = key_set 118 | 119 | def find_size(self, key): 120 | """ 121 | Finds the size of set to which the key belongs. 122 | """ 123 | if self.tree.get(key, None) is None: 124 | raise KeyError("Invalid key, %s"%(key)) 125 | 126 | return self.find_root(key).size 127 | 128 | def disjoint_sets(self): 129 | """ 130 | Returns a list of disjoint sets in the data structure. 
131 | """ 132 | result = dict() 133 | for key in self.tree.keys(): 134 | parent = self.find_root(key).key 135 | members = result.get(parent, []) 136 | members.append(key) 137 | result[parent] = members 138 | sorted_groups = [] 139 | for v in result.values(): 140 | sorted_groups.append(v) 141 | sorted_groups[-1].sort() 142 | sorted_groups.sort() 143 | return sorted_groups 144 | -------------------------------------------------------------------------------- /pydatastructs/utils/_backend/cpp/GraphEdge.hpp: -------------------------------------------------------------------------------- 1 | #ifndef GRAPH_EDGE_HPP 2 | #define GRAPH_EDGE_HPP 3 | 4 | #define PY_SSIZE_T_CLEAN 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include "GraphNode.hpp" 10 | 11 | extern PyTypeObject GraphEdgeType; 12 | 13 | typedef struct { 14 | PyObject_HEAD 15 | PyObject* source; 16 | PyObject* target; 17 | std::variant value; 18 | DataType value_type; 19 | } GraphEdge; 20 | 21 | static void GraphEdge_dealloc(GraphEdge* self) { 22 | Py_XDECREF(self->source); 23 | Py_XDECREF(self->target); 24 | Py_TYPE(self)->tp_free(reinterpret_cast(self)); 25 | } 26 | 27 | static PyObject* GraphEdge_new(PyTypeObject* type, PyObject* args, PyObject* kwds) { 28 | GraphEdge* self = PyObject_New(GraphEdge, &GraphEdgeType); 29 | if (!self) return NULL; 30 | 31 | new (&self->value) std::variant(); 32 | self->value_type = DataType::None; 33 | 34 | static char* kwlist[] = {"node1", "node2", "value", NULL}; 35 | PyObject* node1; 36 | PyObject* node2; 37 | PyObject* value = Py_None; 38 | 39 | if (!PyArg_ParseTupleAndKeywords(args, kwds, "OO|O", kwlist, &node1, &node2, &value)) { 40 | PyErr_SetString(PyExc_ValueError, "Expected (GraphNode, GraphNode, optional value)"); 41 | return NULL; 42 | } 43 | 44 | Py_INCREF(node1); 45 | Py_INCREF(node2); 46 | self->source = node1; 47 | self->target = node2; 48 | 49 | if (value == Py_None) { 50 | self->value_type = DataType::None; 51 | self->value = std::monostate{}; 52 | 
} else if (PyLong_Check(value)) { 53 | self->value_type = DataType::Int; 54 | self->value = static_cast(PyLong_AsLongLong(value)); 55 | } else if (PyFloat_Check(value)) { 56 | self->value_type = DataType::Double; 57 | self->value = PyFloat_AsDouble(value); 58 | } else if (PyUnicode_Check(value)) { 59 | const char* str = PyUnicode_AsUTF8(value); 60 | self->value_type = DataType::String; 61 | self->value = std::string(str); 62 | } else { 63 | PyErr_SetString(PyExc_TypeError, "Unsupported edge value type (must be int, float, str, or None)"); 64 | return NULL; 65 | } 66 | 67 | return reinterpret_cast(self); 68 | } 69 | 70 | static PyObject* GraphEdge_str(GraphEdge* self) { 71 | std::string src = reinterpret_cast(self->source)->name; 72 | std::string tgt = reinterpret_cast(self->target)->name; 73 | std::string val_str; 74 | 75 | switch (self->value_type) { 76 | case DataType::Int: 77 | val_str = std::to_string(std::get(self->value)); 78 | break; 79 | case DataType::Double: 80 | val_str = std::to_string(std::get(self->value)); 81 | break; 82 | case DataType::String: 83 | val_str = std::get(self->value); 84 | break; 85 | case DataType::None: 86 | default: 87 | val_str = "None"; 88 | break; 89 | } 90 | 91 | return PyUnicode_FromFormat("('%s', '%s', %s)", src.c_str(), tgt.c_str(), val_str.c_str()); 92 | } 93 | 94 | static PyObject* GraphEdge_get_value(GraphEdge* self, void* closure) { 95 | switch (self->value_type) { 96 | case DataType::Int: 97 | return PyLong_FromLongLong(std::get(self->value)); 98 | case DataType::Double: 99 | return PyFloat_FromDouble(std::get(self->value)); 100 | case DataType::String: 101 | return PyUnicode_FromString(std::get(self->value).c_str()); 102 | case DataType::None: 103 | default: 104 | Py_RETURN_NONE; 105 | } 106 | } 107 | 108 | static int GraphEdge_set_value(GraphEdge* self, PyObject* value) { 109 | if (value == Py_None) { 110 | self->value_type = DataType::None; 111 | self->value = std::monostate{}; 112 | } else if (PyLong_Check(value)) { 
113 | self->value_type = DataType::Int; 114 | self->value = static_cast(PyLong_AsLongLong(value)); 115 | } else if (PyFloat_Check(value)) { 116 | self->value_type = DataType::Double; 117 | self->value = PyFloat_AsDouble(value); 118 | } else if (PyUnicode_Check(value)) { 119 | const char* str = PyUnicode_AsUTF8(value); 120 | self->value_type = DataType::String; 121 | self->value = std::string(str); 122 | } else { 123 | PyErr_SetString(PyExc_TypeError, "Edge value must be int, float, str, or None."); 124 | return -1; 125 | } 126 | return 0; 127 | } 128 | 129 | static PyGetSetDef GraphEdge_getsetters[] = { 130 | {"value", (getter)GraphEdge_get_value, (setter)GraphEdge_set_value, "Get or set edge value", NULL}, 131 | {NULL} 132 | }; 133 | 134 | #endif 135 | -------------------------------------------------------------------------------- /docs/source/tutorials.rst: -------------------------------------------------------------------------------- 1 | Tutorials 2 | ========= 3 | 4 | We provide the following tutorials to show how ``pydatastructs`` 5 | APIs can help in solving complicated data structures and algorithms 6 | problems easily. For now the problems are abstract. However, we plan 7 | to add some more examples showing usage of ``pydatastructs`` on real world 8 | data sets such as `Stanford Large Network Dataset Collection `_ 9 | and `Urban Dictionary Words And Definitions `_. 10 | If you are interested in playing around with the above datasets using our API, 11 | then please feel free to reach out to us on our community channels. 12 | 13 | Max-Min Stream 14 | -------------- 15 | 16 | In this problem, we will be dealing with a stream of integer numbers. We have to 17 | display the ``k``-th largest and ``k``-th smallest number for all the prefixes of the 18 | input stream. In simple words, after reading each number, we have to display 19 | the ``k``-th largest and ``k``-th smallest number up until that number in the stream. 
20 | If the size of the stream is smaller than ``k`` then we will display the minimum 21 | for ``k``-th smallest and maximum for ``k``-th largest numbers respectively. 22 | 23 | **Input Format** 24 | 25 | The first line of input will contain the value, ``k``. After that, each line of 26 | input will contain an integer representing the new number of the stream. The stopping 27 | point of the stream will be denoted by 0. Note that stopping point i.e., 0 will also 28 | be considered a part of the input stream. 29 | 30 | **Output Format** 31 | 32 | Each line of the output should contain two space separated numbers, the first one 33 | representing the ``k``-th largest/maximum number and the second one representing 34 | the ``k``-th smallest/minimum number. 35 | 36 | >>> from pydatastructs import BinaryHeap, Queue 37 | >>> def modify_heaps(min_heap, max_heap, curr_num, k): 38 | ... min_heap.insert(curr_num) 39 | ... max_heap.insert(curr_num) 40 | ... if min_heap.heap._num > k: 41 | ... min_heap.extract() 42 | ... if max_heap.heap._num > k: 43 | ... max_heap.extract() 44 | ... large, small = (max_heap.heap[0].key, min_heap.heap[0].key) 45 | ... return large, small 46 | ... 47 | >>> min_heap = BinaryHeap(heap_property='min') 48 | >>> max_heap = BinaryHeap(heap_property='max') 49 | >>> k = 2 50 | >>> curr_nums = Queue(items=[4, 5, 8, 0]) # input stream as a list 51 | >>> curr_num = curr_nums.popleft() 52 | >>> large_small = [] 53 | >>> while curr_num != 0: 54 | ... large, small = modify_heaps(min_heap, max_heap, curr_num, k) 55 | ... large_small.append((large, small)) 56 | ... curr_num = curr_nums.popleft() 57 | ... 58 | >>> large, small = modify_heaps(min_heap, max_heap, curr_num, k) 59 | >>> large_small.append((large, small)) 60 | >>> print(large_small) 61 | [(4, 4), (5, 4), (5, 5), (4, 5)] 62 | 63 | Minimise Network Delay 64 | ---------------------- 65 | 66 | In this problem there will be a network containing ``N`` nodes, labelled as 1 ... ``N``, and ``E`` edges. 
67 | Any two nodes may be connected by an undirected edge ``E(u, v)`` and introduces a delay of time ``t(u, v)`` 68 | in transfer of information between the nodes ``u`` and ``v``. 69 | 70 | We will be given ``K`` queries where each query contains the source node and the destination node and 71 | we will be required to determine the minimum time it will take for a piece of information to start from 72 | the source node and reach at the destination node. 73 | 74 | We will assume that the size of information and the processing time at any node doesn’t affect the travel time. 75 | 76 | **Input Format** 77 | 78 | The first line will contain a single positive integer ``N``. 79 | 80 | The second line will contain a single positive integer ``E``. 81 | 82 | Then ``E`` lines will follow, each line containing three space separated integers. 83 | The first two denoting node labels connected by an undirected edge which introduces 84 | a time delay denoted by the third integer. 85 | 86 | After that the next line will contain a positive integer ``K``. 87 | 88 | Then ``K`` lines will follow each containing two space separated node labels, the 89 | first denoting the source node and the second one denoting the destination node for that query. 90 | 91 | **Output Format** 92 | 93 | ``K`` lines, each containing the minimum time required for the ``k``-th query. 94 | 95 | >>> from pydatastructs import Graph, AdjacencyListGraphNode 96 | >>> from pydatastructs.graphs.algorithms import shortest_paths 97 | >>> N = 4 98 | >>> E = 3 99 | >>> nodes = [] 100 | >>> for n in range(N): 101 | ... nodes.append(AdjacencyListGraphNode(str(n + 1))) 102 | ... 103 | >>> u_v_t = [(1, 2, 1), (2, 3, 1), (3, 4, 1)] # edges and their time delay 104 | >>> graph = Graph(*nodes) 105 | >>> for e in range(E): 106 | ... u, v, t = u_v_t[e] 107 | ... graph.add_edge(str(u), str(v), t) 108 | ... graph.add_edge(str(v), str(u), t) 109 | ... 
110 | >>> K = 3 111 | >>> u_v = [(1, 4), (3, 2), (4, 3)] # queries 112 | >>> delays = [] 113 | >>> for k in range(K): 114 | ... u, v = u_v[k] 115 | ... delay = shortest_paths(graph, 'dijkstra', str(u))[0] 116 | ... delays.append(delay[str(v)]) 117 | ... 118 | >>> print(delays) 119 | [3, 1, 1] -------------------------------------------------------------------------------- /pydatastructs/trees/m_ary_trees.py: -------------------------------------------------------------------------------- 1 | from pydatastructs.utils import MAryTreeNode 2 | from pydatastructs.linear_data_structures.arrays import ArrayForTrees 3 | from pydatastructs.utils.misc_util import ( 4 | Backend, raise_if_backend_is_not_python) 5 | 6 | __all__ = [ 7 | 'MAryTree' 8 | ] 9 | 10 | class MAryTree(object): 11 | """ 12 | Abstract m-ary tree. 13 | 14 | Parameters 15 | ========== 16 | 17 | key 18 | Required if tree is to be instantiated with 19 | root otherwise not needed. 20 | root_data 21 | Optional, the root node of the binary tree. 22 | If not of type MAryTreeNode, it will consider 23 | root as data and a new root node will 24 | be created. 25 | comp: lambda 26 | Optional, A lambda function which will be used 27 | for comparison of keys. Should return a 28 | bool value. By default it implements less 29 | than operator. 30 | is_order_statistic: bool 31 | Set it to True, if you want to use the 32 | order statistic features of the tree. 33 | max_children 34 | Optional, specifies the maximum number of children 35 | a node can have. Defaults to 2 in case nothing is 36 | specified. 37 | 38 | References 39 | ========== 40 | 41 | .. 
[1] https://en.wikipedia.org/wiki/M-ary_tree 42 | """ 43 | 44 | __slots__ = ['root_idx', 'max_children', 'comparator', 'tree', 'size', 45 | 'is_order_statistic'] 46 | 47 | 48 | def __new__(cls, key=None, root_data=None, comp=None, 49 | is_order_statistic=False, max_children=2, 50 | **kwargs): 51 | raise_if_backend_is_not_python( 52 | cls, kwargs.get('backend', Backend.PYTHON)) 53 | obj = object.__new__(cls) 54 | if key is None and root_data is not None: 55 | raise ValueError('Key required.') 56 | key = None if root_data is None else key 57 | root = MAryTreeNode(key, root_data) 58 | root.is_root = True 59 | obj.root_idx = 0 60 | obj.max_children = max_children 61 | obj.tree, obj.size = ArrayForTrees(MAryTreeNode, [root]), 1 62 | obj.comparator = lambda key1, key2: key1 < key2 \ 63 | if comp is None else comp 64 | obj.is_order_statistic = is_order_statistic 65 | return obj 66 | 67 | @classmethod 68 | def methods(cls): 69 | return ['__new__', '__str__'] 70 | 71 | def insert(self, key, data=None): 72 | """ 73 | Inserts data by the passed key using iterative 74 | algorithm. 75 | 76 | Parameters 77 | ========== 78 | 79 | key 80 | The key for comparison. 81 | data 82 | The data to be inserted. 83 | 84 | Returns 85 | ======= 86 | 87 | None 88 | """ 89 | raise NotImplementedError("This is an abstract method.") 90 | 91 | def delete(self, key, **kwargs): 92 | """ 93 | Deletes the data with the passed key 94 | using iterative algorithm. 95 | 96 | Parameters 97 | ========== 98 | 99 | key 100 | The key of the node which is 101 | to be deleted. 102 | 103 | Returns 104 | ======= 105 | 106 | True 107 | If the node is deleted successfully. 108 | 109 | None 110 | If the node to be deleted doesn't exists. 111 | 112 | Note 113 | ==== 114 | 115 | The node is deleted means that the connection to that 116 | node are removed but the it is still in tree. 
117 | """ 118 | raise NotImplementedError("This is an abstract method.") 119 | 120 | def search(self, key, **kwargs): 121 | """ 122 | Searches for the data in the binary search tree 123 | using iterative algorithm. 124 | 125 | Parameters 126 | ========== 127 | 128 | key 129 | The key for searching. 130 | parent: bool 131 | If true then returns index of the 132 | parent of the node with the passed 133 | key. 134 | By default, False 135 | 136 | Returns 137 | ======= 138 | 139 | int 140 | If the node with the passed key is 141 | in the tree. 142 | tuple 143 | The index of the searched node and 144 | the index of the parent of that node. 145 | None 146 | In all other cases. 147 | """ 148 | raise NotImplementedError("This is an abstract method.") 149 | 150 | def to_binary_tree(self): 151 | """ 152 | Converts an m-ary tree to a binary tree. 153 | 154 | Returns 155 | ======= 156 | 157 | TreeNode 158 | The root of the newly created binary tree. 159 | """ 160 | raise NotImplementedError("This is an abstract method.") 161 | 162 | 163 | def __str__(self): 164 | to_be_printed = ['' for i in range(self.tree._last_pos_filled + 1)] 165 | for i in range(self.tree._last_pos_filled + 1): 166 | if self.tree[i] is not None: 167 | node = self.tree[i] 168 | to_be_printed[i] = (node.key, node.data) 169 | for j in node.children: 170 | if j is not None: 171 | to_be_printed[i].append(j) 172 | return str(to_be_printed) 173 | -------------------------------------------------------------------------------- /pydatastructs/graphs/graph.py: -------------------------------------------------------------------------------- 1 | 2 | from pydatastructs.utils.misc_util import Backend, raise_if_backend_is_not_python 3 | 4 | __all__ = [ 5 | 'Graph' 6 | ] 7 | 8 | class Graph(object): 9 | """ 10 | Represents generic concept of graphs. 11 | 12 | Parameters 13 | ========== 14 | 15 | implementation: str 16 | The implementation to be used for storing 17 | graph in memory. 
It can be figured out
        from type of the vertices (if passed at construction).
        Currently the following implementations are supported,

        'adjacency_list' -> Adjacency list implementation.

        'adjacency_matrix' -> Adjacency matrix implementation.

        By default, 'adjacency_list'.
    vertices: GraphNode(s)
        For AdjacencyList implementation vertices
        can be passed for initializing the graph.
    backend: pydatastructs.Backend
        The backend to be used.
        Optional, by default, the best available
        backend is used.

    Examples
    ========

    >>> from pydatastructs.graphs import Graph
    >>> from pydatastructs.utils import AdjacencyListGraphNode
    >>> v_1 = AdjacencyListGraphNode('v_1', 1)
    >>> v_2 = AdjacencyListGraphNode('v_2', 2)
    >>> g = Graph(v_1, v_2)
    >>> g.add_edge('v_1', 'v_2')
    >>> g.add_edge('v_2', 'v_1')
    >>> g.is_adjacent('v_1', 'v_2')
    True
    >>> g.is_adjacent('v_2', 'v_1')
    True
    >>> g.remove_edge('v_1', 'v_2')
    >>> g.is_adjacent('v_1', 'v_2')
    False
    >>> g.is_adjacent('v_2', 'v_1')
    True

    References
    ==========

    .. [1] https://en.wikipedia.org/wiki/Graph_(abstract_data_type)

    Note
    ====

    Make sure to create nodes (AdjacencyListGraphNode or AdjacencyMatrixGraphNode)
    and add them to your graph using Graph.add_vertex before adding edges whose
    end points require either of the nodes that you added. In other words,
    Graph.add_edge doesn't add new nodes on its own if the input
    nodes are not already present in the Graph.
67 | 68 | """ 69 | 70 | __slots__ = ['_impl'] 71 | 72 | def __new__(cls, *args, **kwargs): 73 | backend = kwargs.get('backend', Backend.PYTHON) 74 | try: 75 | default_impl = args[0]._impl if args else 'adjacency_list' 76 | except: 77 | default_impl = 'adjacency_list' 78 | implementation = kwargs.get('implementation', default_impl) 79 | if implementation == 'adjacency_list': 80 | from pydatastructs.graphs.adjacency_list import AdjacencyList 81 | obj = AdjacencyList(*args, **kwargs) 82 | return obj 83 | elif implementation == 'adjacency_matrix': 84 | from pydatastructs.graphs.adjacency_matrix import AdjacencyMatrix 85 | obj = AdjacencyMatrix(*args, **kwargs) 86 | return obj 87 | else: 88 | raise NotImplementedError("%s implementation is not a part " 89 | "of the library currently."%(implementation)) 90 | 91 | def is_adjacent(self, node1, node2): 92 | """ 93 | Checks if the nodes with the given 94 | with the given names are adjacent 95 | to each other. 96 | """ 97 | raise NotImplementedError( 98 | "This is an abstract method.") 99 | 100 | def neighbors(self, node): 101 | """ 102 | Lists the neighbors of the node 103 | with given name. 104 | """ 105 | raise NotImplementedError( 106 | "This is an abstract method.") 107 | 108 | def add_vertex(self, node): 109 | """ 110 | Adds the input vertex to the node, or does nothing 111 | if the input vertex is already in the graph. 112 | """ 113 | raise NotImplementedError( 114 | "This is an abstract method.") 115 | 116 | def remove_vertex(self, node): 117 | """ 118 | Removes the input vertex along with all the edges 119 | pointing towards it. 120 | """ 121 | raise NotImplementedError( 122 | "This is an abstract method.") 123 | 124 | def add_edge(self, source, target, cost=None): 125 | """ 126 | Adds the edge starting at first parameter 127 | i.e., source and ending at the second 128 | parameter i.e., target. 
129 | """ 130 | raise NotImplementedError( 131 | "This is an abstract method.") 132 | 133 | def get_edge(self, source, target): 134 | """ 135 | Returns GraphEdge object if there 136 | is an edge between source and target 137 | otherwise None. 138 | """ 139 | raise NotImplementedError( 140 | "This is an abstract method.") 141 | 142 | def remove_edge(self, source, target): 143 | """ 144 | Removes the edge starting at first parameter 145 | i.e., source and ending at the second 146 | parameter i.e., target. 147 | """ 148 | raise NotImplementedError( 149 | "This is an abstract method.") 150 | 151 | def num_vertices(self): 152 | """ 153 | Number of vertices 154 | """ 155 | raise NotImplementedError( 156 | "This is an abstract method.") 157 | 158 | def num_edges(self): 159 | """ 160 | Number of edges 161 | """ 162 | raise NotImplementedError( 163 | "This is an abstract method.") 164 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: build 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 7 | branches: [main] 8 | 9 | jobs: 10 | test-ubuntu-py39-coverage: 11 | runs-on: ubuntu-latest 12 | timeout-minutes: 20 13 | 14 | steps: 15 | - uses: actions/checkout@v3 16 | 17 | - name: Set up Python 3.9 18 | uses: actions/setup-python@v4 19 | with: 20 | python-version: "3.9" 21 | 22 | - name: Upgrade pip version 23 | run: | 24 | python -m pip install --upgrade pip 25 | 26 | - name: Install requirements 27 | run: | 28 | python -m pip install -r requirements.txt 29 | python -m pip install -r docs/requirements.txt 30 | python -m pip install spin 31 | 32 | - name: Install lcov 33 | run: | 34 | sudo apt-get update 35 | sudo apt-get install -y lcov 36 | 37 | - name: Build package 38 | env: 39 | CXXFLAGS: "-std=c++17 --coverage" 40 | CFLAGS: "--coverage" 41 | run: | 42 | spin build -v 43 | 44 | - name: Run tests 45 | run: | 46 | spin 
test -v 47 | 48 | - name: Capture Coverage Data with lcov 49 | run: | 50 | lcov --capture --directory . --output-file coverage.info --no-external 51 | 52 | - name: Generate HTML Coverage Report with genhtml 53 | run: | 54 | genhtml coverage.info --output-directory coverage_report 55 | 56 | - name: Upload Coverage 57 | uses: codecov/codecov-action@v3 58 | with: 59 | token: ${{ secrets.CODECOV_TOKEN }} 60 | directory: ./coverage/reports/ 61 | env_vars: OS,PYTHON 62 | fail_ci_if_error: false 63 | files: ./coverage.xml 64 | flags: unittests 65 | name: codecov-umbrella 66 | path_to_write_report: ./coverage/codecov_report.txt 67 | verbose: true 68 | 69 | - name: Build Documentation 70 | run: | 71 | sphinx-build -b html docs/source/ docs/build/html 72 | 73 | test-ubuntu-py39-py310-py311: 74 | runs-on: ${{matrix.os}} 75 | timeout-minutes: 20 76 | strategy: 77 | fail-fast: false 78 | matrix: 79 | os: [ubuntu-latest] 80 | python-version: 81 | - "3.9" 82 | - "3.10" 83 | - "3.11" 84 | 85 | steps: 86 | - uses: actions/checkout@v3 87 | 88 | - name: Set up Python ${{ matrix.python-version }} 89 | uses: actions/setup-python@v4 90 | with: 91 | python-version: ${{ matrix.python-version }} 92 | 93 | - name: Upgrade pip version 94 | run: | 95 | python -m pip install --upgrade pip 96 | 97 | - name: Install requirements 98 | run: | 99 | python -m pip install -r requirements.txt 100 | python -m pip install -r docs/requirements.txt 101 | python -m pip install spin 102 | 103 | - name: Build package 104 | env: 105 | CXXFLAGS: "-std=c++17" 106 | run: | 107 | spin build -v 108 | 109 | - name: Run tests 110 | run: | 111 | spin test -v 112 | 113 | - name: Build Documentation 114 | run: | 115 | sphinx-build -b html docs/source/ docs/build/html 116 | 117 | test-macos: 118 | runs-on: ${{matrix.os}} 119 | timeout-minutes: 20 120 | strategy: 121 | fail-fast: false 122 | matrix: 123 | os: [macos-latest] 124 | python-version: 125 | - "3.9" 126 | - "3.10" 127 | - "3.11" 128 | 129 | steps: 130 | - uses: 
actions/checkout@v3 131 | 132 | - name: Set up Python ${{ matrix.python-version }} 133 | uses: actions/setup-python@v4 134 | with: 135 | python-version: ${{ matrix.python-version }} 136 | 137 | - name: Upgrade pip version 138 | run: | 139 | python -m pip install --upgrade pip 140 | 141 | - name: Install requirements 142 | run: | 143 | python -m pip install -r requirements.txt 144 | python -m pip install -r docs/requirements.txt 145 | python -m pip install spin 146 | 147 | - name: Build package 148 | env: 149 | MACOSX_DEPLOYMENT_TARGET: 11.0 150 | CXXFLAGS: "-std=c++17" 151 | run: | 152 | spin build -v 153 | 154 | - name: Run tests 155 | run: | 156 | spin test -v 157 | 158 | - name: Build Documentation 159 | run: | 160 | sphinx-build -b html docs/source/ docs/build/html 161 | 162 | test-windows: 163 | runs-on: windows-latest 164 | strategy: 165 | fail-fast: false 166 | matrix: 167 | python-version: ["3.9", "3.10", "3.11"] 168 | 169 | steps: 170 | - uses: actions/checkout@v3 171 | 172 | - uses: actions/setup-python@v4 173 | with: 174 | python-version: ${{ matrix.python-version }} 175 | 176 | - uses: ilammy/msvc-dev-cmd@v1 177 | 178 | - name: Upgrade pip 179 | run: python -m pip install --upgrade pip 180 | 181 | - name: Install requirements 182 | run: | 183 | python -m pip install -r requirements.txt 184 | python -m pip install -r docs/requirements.txt 185 | python -m pip install spin 186 | 187 | - name: Build package 188 | env: 189 | CFLAGS: "/MD" 190 | CXXFLAGS: "/std:c++17 /MD /Zc:strictStrings-" 191 | CL: "/std:c++17 /MD /Zc:strictStrings-" 192 | run: spin build -v 193 | 194 | - name: Run tests 195 | run: spin test -v 196 | 197 | - name: Build Documentation 198 | run: sphinx-build -b html docs/source/ docs/build/html 199 | -------------------------------------------------------------------------------- /pydatastructs/strings/trie.py: -------------------------------------------------------------------------------- 1 | from pydatastructs.utils.misc_util import ( 2 | 
TrieNode, Backend, 3 | raise_if_backend_is_not_python) 4 | from collections import deque 5 | import copy 6 | 7 | __all__ = [ 8 | 'Trie' 9 | ] 10 | 11 | Stack = Queue = deque 12 | 13 | class Trie(object): 14 | """ 15 | Represents the trie data structure for storing strings. 16 | 17 | Parameters 18 | ========== 19 | 20 | backend: pydatastructs.Backend 21 | The backend to be used. 22 | Optional, by default, the best available 23 | backend is used. 24 | 25 | Examples 26 | ======== 27 | 28 | >>> from pydatastructs import Trie 29 | >>> trie = Trie() 30 | >>> trie.insert("a") 31 | >>> trie.insert("aa") 32 | >>> trie.strings_with_prefix("a") 33 | ['a', 'aa'] 34 | >>> trie.is_present("aa") 35 | True 36 | >>> trie.delete("aa") 37 | True 38 | >>> trie.is_present("aa") 39 | False 40 | 41 | References 42 | ========== 43 | 44 | .. [1] https://en.wikipedia.org/wiki/Trie 45 | """ 46 | 47 | __slots__ = ['root'] 48 | 49 | @classmethod 50 | def methods(cls): 51 | return ['__new__', 'insert', 'is_present', 'delete', 52 | 'strings_with_prefix'] 53 | 54 | def __new__(cls, **kwargs): 55 | raise_if_backend_is_not_python( 56 | cls, kwargs.get('backend', Backend.PYTHON)) 57 | obj = object.__new__(cls) 58 | obj.root = TrieNode() 59 | return obj 60 | 61 | def insert(self, string: str) -> None: 62 | """ 63 | Inserts the given string into the trie. 64 | 65 | Parameters 66 | ========== 67 | 68 | string: str 69 | 70 | Returns 71 | ======= 72 | 73 | None 74 | """ 75 | walk = self.root 76 | for char in string: 77 | if walk.get_child(char) is None: 78 | newNode = TrieNode(char) 79 | walk.add_child(newNode) 80 | walk = newNode 81 | else: 82 | walk = walk.get_child(char) 83 | walk.is_terminal = True 84 | 85 | def is_present(self, string: str) -> bool: 86 | """ 87 | Checks if the given string is present as a prefix in the trie. 
88 | 89 | Parameters 90 | ========== 91 | 92 | string: str 93 | 94 | Returns 95 | ======= 96 | 97 | True if the given string is present as a prefix; 98 | False in all other cases. 99 | """ 100 | walk = self.root 101 | for char in string: 102 | if walk.get_child(char) is None: 103 | return False 104 | walk = walk.get_child(char) 105 | return True 106 | 107 | def is_inserted(self, string: str) -> bool: 108 | """ 109 | Checks if the given string was inserted in the trie. 110 | 111 | Parameters 112 | ========== 113 | 114 | string: str 115 | 116 | Returns 117 | ======= 118 | 119 | True if the given string was inserted in trie; 120 | False in all other cases. 121 | """ 122 | walk = self.root 123 | for char in string: 124 | if walk.get_child(char) is None: 125 | return False 126 | walk = walk.get_child(char) 127 | return walk.is_terminal 128 | 129 | def delete(self, string: str) -> bool: 130 | """ 131 | Deletes the given string from the trie. 132 | 133 | Parameters 134 | ========== 135 | 136 | string: str 137 | 138 | Returns 139 | ======= 140 | 141 | True if successfully deleted; 142 | None if the string is not present in the trie. 143 | """ 144 | path = [] 145 | walk = self.root 146 | size = len(string) 147 | for i in range(size): 148 | char = string[i] 149 | path.append(walk) 150 | if walk.get_child(char) is None: 151 | return None 152 | walk = walk.get_child(char) 153 | path.append(walk) 154 | i = len(path) - 1 155 | path[i].is_terminal = False 156 | while not path[i]._children and i >= 1: 157 | path[i-1].remove_child(path[i].char) 158 | i -= 1 159 | if path[i].is_terminal: 160 | return True 161 | return True 162 | 163 | def strings_with_prefix(self, string: str) -> list: 164 | """ 165 | Generates a list of all strings with the given prefix. 166 | 167 | Parameters 168 | ========== 169 | 170 | string: str 171 | 172 | Returns 173 | ======= 174 | 175 | strings: list 176 | The list of strings with the given prefix. 
177 | """ 178 | 179 | def _collect(prefix: str, node: TrieNode, strings: list) -> str: 180 | TrieNode_stack = Stack() 181 | TrieNode_stack.append((node, prefix)) 182 | while TrieNode_stack: 183 | walk, curr_prefix = TrieNode_stack.pop() 184 | if walk.is_terminal: 185 | strings.append(curr_prefix + walk.char) 186 | for child in walk._children: 187 | TrieNode_stack.append((walk.get_child(child), curr_prefix + walk.char)) 188 | 189 | strings = [] 190 | prefix = "" 191 | walk = self.root 192 | for char in string: 193 | walk = walk.get_child(char) 194 | if walk is None: 195 | return strings 196 | prefix += char 197 | if walk.is_terminal: 198 | strings.append(walk.char) 199 | for child in walk._children: 200 | _collect(prefix, walk.get_child(child), strings) 201 | return strings 202 | -------------------------------------------------------------------------------- /pydatastructs/utils/_backend/cpp/utils.hpp: -------------------------------------------------------------------------------- 1 | #ifndef UTILS_UTILS_HPP 2 | #define UTILS_UTILS_HPP 3 | 4 | #define PY_SSIZE_T_CLEAN 5 | #include 6 | #include 7 | #include 8 | 9 | static PyObject* get_PyZero() { 10 | static PyObject* PyZero = nullptr; 11 | if (PyZero == nullptr) { 12 | PyZero = PyLong_FromLong(0); 13 | } 14 | return PyZero; 15 | } 16 | 17 | static PyObject* get_PyOne() { 18 | static PyObject* PyOne = nullptr; 19 | if (PyOne == nullptr) { 20 | PyOne = PyLong_FromLong(1); 21 | } 22 | return PyOne; 23 | } 24 | 25 | static PyObject* get_PyTwo() { 26 | static PyObject* PyTwo = nullptr; 27 | if (PyTwo == nullptr) { 28 | PyTwo = PyLong_FromLong(2); 29 | } 30 | return PyTwo; 31 | } 32 | 33 | static PyObject* get_PyThree() { 34 | static PyObject* PyThree = nullptr; 35 | if (PyThree == nullptr) { 36 | PyThree = PyLong_FromLong(3); 37 | } 38 | return PyThree; 39 | } 40 | #define PyZero get_PyZero() 41 | #define PyOne get_PyOne() 42 | #define PyTwo get_PyTwo() 43 | #define PyThree get_PyThree() 44 | static const char* 
_encoding = "utf-8"; 45 | static const char* _invalid_char = ""; 46 | 47 | static char* PyObject_AsString(PyObject* obj) { 48 | return PyBytes_AS_STRING(PyUnicode_AsEncodedString(obj, _encoding, _invalid_char)); 49 | } 50 | 51 | static PyObject* __str__(PyObject** array, size_t size, long last_pos_filled=-1) { 52 | std::string array___str__ = "["; 53 | size_t end = last_pos_filled == -1 ? size : (size_t) (last_pos_filled + 1); 54 | for( size_t i = 0; i < end; i++ ) { 55 | if ( array[i] == Py_None ) { 56 | array___str__.append("''"); 57 | } else { 58 | PyObject* array_i = PyObject_Str(array[i]); 59 | char* i___str__ = PyObject_AsString(array_i); 60 | array___str__.append("'" + std::string(i___str__) + "'"); 61 | } 62 | if ( i + 1 != end ) { 63 | array___str__.append(", "); 64 | } 65 | } 66 | array___str__.push_back(']'); 67 | return PyUnicode_FromString(array___str__.c_str()); 68 | } 69 | 70 | static int set_exception_if_dtype_mismatch(PyObject* value, PyObject* dtype) { 71 | if ( !PyObject_IsInstance(value, dtype) ) { 72 | PyErr_WriteUnraisable( 73 | PyErr_Format(PyExc_TypeError, 74 | "Unable to store %s object in %s type array.", 75 | PyObject_AsString(PyObject_Repr(PyObject_Type(value))), 76 | PyObject_AsString(PyObject_Repr(dtype)))); 77 | return 1; 78 | } 79 | return 0; 80 | } 81 | 82 | static int raise_exception_if_dtype_mismatch(PyObject* value, PyObject* dtype) { 83 | if ( !PyObject_IsInstance(value, dtype) ) { 84 | PyErr_Format(PyExc_TypeError, 85 | "Unable to store %s object in %s type array.", 86 | PyObject_AsString(PyObject_Repr(PyObject_Type(value))), 87 | PyObject_AsString(PyObject_Repr(dtype))); 88 | return 1; 89 | } 90 | return 0; 91 | } 92 | 93 | static int raise_exception_if_not_array(PyObject* arg) { 94 | PyErr_Format(PyExc_TypeError, 95 | ("Unable to sort %s data structure. 
" 96 | "Only accepted types are OneDimensionalArray and DynamicOneDinesionalArray"), 97 | PyObject_AsString(PyObject_Repr(PyObject_Type(arg))) 98 | ); 99 | return 1; 100 | } 101 | 102 | static int _check_type(PyObject* arg, PyTypeObject* type) { 103 | return strcmp(arg->ob_type->tp_name, type->tp_name) == 0; 104 | } 105 | 106 | static int _comp(PyObject* u, PyObject* v, PyObject* tcomp) { 107 | int u_isNone = u == Py_None; 108 | int v_isNone = v == Py_None; 109 | if ( u_isNone && !v_isNone) { 110 | return 0; 111 | } 112 | if ( !u_isNone && v_isNone ) { 113 | return 1; 114 | } 115 | if ( u_isNone && v_isNone ) { 116 | return 0; 117 | } 118 | if ( tcomp ) { 119 | PyObject* result_PyObject = PyObject_CallFunctionObjArgs(tcomp, u, v, NULL); 120 | if ( !result_PyObject ) { 121 | PyErr_Format(PyExc_ValueError, 122 | "Unable to compare %s object with %s object.", 123 | PyObject_AsString(PyObject_Repr(PyObject_Type(u))), 124 | PyObject_AsString(PyObject_Repr(PyObject_Type(v))) 125 | ); 126 | } 127 | return result_PyObject == Py_True; 128 | } 129 | 130 | int result = PyObject_RichCompareBool(u, v, Py_LE); 131 | if ( result == -1 ) { 132 | PyErr_Format(PyExc_ValueError, 133 | "Unable to compare %s object with %s object.", 134 | PyObject_AsString(PyObject_Repr(PyObject_Type(u))), 135 | PyObject_AsString(PyObject_Repr(PyObject_Type(v))) 136 | ); 137 | } 138 | return result; 139 | } 140 | 141 | enum class NodeType_ { 142 | InvalidType, 143 | Node, 144 | TreeNode, 145 | GraphNode, 146 | AdjacencyListGraphNode, 147 | AdjacencyMatrixGraphNode, 148 | GraphEdge 149 | }; 150 | 151 | static NodeType_ get_type_tag(PyObject *node_obj) { 152 | if (!PyObject_HasAttrString(node_obj, "type_tag")) { 153 | return NodeType_::InvalidType; // attribute missing 154 | } 155 | 156 | PyObject *attr = PyObject_GetAttrString(node_obj, "type_tag"); 157 | if (!attr) { 158 | return NodeType_::InvalidType; // getattr failed 159 | } 160 | 161 | if (!PyLong_Check(attr)) { 162 | Py_DECREF(attr); 163 | return 
NodeType_::InvalidType; // not an int 164 | } 165 | 166 | int tag = (int)PyLong_AsLong(attr); 167 | Py_DECREF(attr); 168 | 169 | if (PyErr_Occurred()) { 170 | return NodeType_::InvalidType; // overflow or error in cast 171 | } 172 | 173 | return static_cast(tag); 174 | } 175 | 176 | 177 | #endif 178 | -------------------------------------------------------------------------------- /pydatastructs/miscellaneous_data_structures/stack.py: -------------------------------------------------------------------------------- 1 | from pydatastructs.linear_data_structures import DynamicOneDimensionalArray, SinglyLinkedList 2 | from pydatastructs.miscellaneous_data_structures._backend.cpp import _stack 3 | from pydatastructs.utils.misc_util import ( 4 | _check_type, NoneType, Backend, 5 | raise_if_backend_is_not_python) 6 | from copy import deepcopy as dc 7 | 8 | __all__ = [ 9 | 'Stack' 10 | ] 11 | 12 | class Stack(object): 13 | """Representation of stack data structure 14 | 15 | Parameters 16 | ========== 17 | 18 | implementation : str 19 | Implementation to be used for stack. 20 | By default, 'array' 21 | Currently only supports 'array' 22 | implementation. 23 | items : list/tuple 24 | Optional, by default, None 25 | The inital items in the stack. 26 | For array implementation. 27 | dtype : A valid python type 28 | Optional, by default NoneType if item 29 | is None, otherwise takes the data 30 | type of DynamicOneDimensionalArray 31 | For array implementation. 32 | backend: pydatastructs.Backend 33 | The backend to be used. 34 | Optional, by default, the best available 35 | backend is used. 36 | 37 | Examples 38 | ======== 39 | 40 | >>> from pydatastructs import Stack 41 | >>> s = Stack() 42 | >>> s.push(1) 43 | >>> s.push(2) 44 | >>> s.push(3) 45 | >>> str(s) 46 | '[1, 2, 3]' 47 | >>> s.pop() 48 | 3 49 | 50 | References 51 | ========== 52 | 53 | .. 
[1] https://en.wikipedia.org/wiki/Stack_(abstract_data_type) 54 | """ 55 | 56 | def __new__(cls, implementation='array', **kwargs): 57 | backend = kwargs.get('backend', Backend.PYTHON) 58 | if implementation == 'array': 59 | items = kwargs.get('items', None) 60 | dtype = kwargs.get('dtype', int) 61 | if backend == Backend.CPP: 62 | return _stack.ArrayStack(items, dtype) 63 | 64 | return ArrayStack(items, dtype) 65 | if implementation == 'linked_list': 66 | raise_if_backend_is_not_python(cls, backend) 67 | 68 | return LinkedListStack( 69 | kwargs.get('items', None) 70 | ) 71 | raise NotImplementedError( 72 | "%s hasn't been implemented yet."%(implementation)) 73 | 74 | @classmethod 75 | def methods(cls): 76 | return ['__new__'] 77 | 78 | def push(self, *args, **kwargs): 79 | raise NotImplementedError( 80 | "This is an abstract method.") 81 | 82 | def pop(self, *args, **kwargs): 83 | raise NotImplementedError( 84 | "This is an abstract method.") 85 | 86 | @property 87 | def is_empty(self): 88 | raise NotImplementedError( 89 | "This is an abstract method.") 90 | 91 | @property 92 | def peek(self): 93 | raise NotImplementedError( 94 | "This is an abstract method.") 95 | 96 | class ArrayStack(Stack): 97 | 98 | __slots__ = ['items'] 99 | 100 | def __new__(cls, items=None, dtype=NoneType, 101 | **kwargs): 102 | raise_if_backend_is_not_python( 103 | cls, kwargs.get('backend', Backend.PYTHON)) 104 | if items is None: 105 | items = DynamicOneDimensionalArray(dtype, 0) 106 | else: 107 | items = DynamicOneDimensionalArray(dtype, items) 108 | obj = object.__new__(cls) 109 | obj.items = items 110 | return obj 111 | 112 | @classmethod 113 | def methods(cls): 114 | return ['__new__', 'push', 'pop', 'is_emtpy', 115 | 'peek', '__len__', '__str__'] 116 | 117 | def push(self, x): 118 | if self.is_empty: 119 | self.items._dtype = type(x) 120 | self.items.append(x) 121 | 122 | def pop(self): 123 | if self.is_empty: 124 | raise IndexError("Stack is empty") 125 | 126 | top_element = 
dc(self.items[self.items._last_pos_filled]) 127 | self.items.delete(self.items._last_pos_filled) 128 | return top_element 129 | 130 | @property 131 | def is_empty(self): 132 | return self.items._last_pos_filled == -1 133 | 134 | @property 135 | def peek(self): 136 | return self.items[self.items._last_pos_filled] 137 | 138 | def __len__(self): 139 | return self.items._num 140 | 141 | def __str__(self): 142 | """ 143 | Used for printing. 144 | """ 145 | return str(self.items._data) 146 | 147 | 148 | class LinkedListStack(Stack): 149 | 150 | __slots__ = ['stack'] 151 | 152 | def __new__(cls, items=None, **kwargs): 153 | raise_if_backend_is_not_python( 154 | cls, kwargs.get('backend', Backend.PYTHON)) 155 | obj = object.__new__(cls) 156 | obj.stack = SinglyLinkedList() 157 | if items is None: 158 | pass 159 | elif type(items) in (list, tuple): 160 | for x in items: 161 | obj.push(x) 162 | else: 163 | raise TypeError("Expected type: list/tuple") 164 | return obj 165 | 166 | @classmethod 167 | def methods(cls): 168 | return ['__new__', 'push', 'pop', 'is_emtpy', 169 | 'peek', '__len__', '__str__'] 170 | 171 | def push(self, x): 172 | self.stack.appendleft(x) 173 | 174 | def pop(self): 175 | if self.is_empty: 176 | raise IndexError("Stack is empty") 177 | return self.stack.popleft() 178 | 179 | @property 180 | def is_empty(self): 181 | return self.__len__() == 0 182 | 183 | @property 184 | def peek(self): 185 | return self.stack.head 186 | 187 | @property 188 | def size(self): 189 | return self.stack.size 190 | 191 | def __len__(self): 192 | return self.stack.size 193 | 194 | def __str__(self): 195 | elements = [] 196 | current_node = self.peek 197 | while current_node is not None: 198 | elements.append(str(current_node)) 199 | current_node = current_node.next 200 | return str(elements[::-1]) 201 | -------------------------------------------------------------------------------- /pydatastructs/linear_data_structures/tests/test_arrays.py: 
-------------------------------------------------------------------------------- 1 | from pydatastructs.linear_data_structures import ( 2 | OneDimensionalArray, DynamicOneDimensionalArray, 3 | MultiDimensionalArray, ArrayForTrees) 4 | from pydatastructs.utils.misc_util import Backend 5 | from pydatastructs.utils.raises_util import raises 6 | from pydatastructs.utils import TreeNode 7 | from pydatastructs.utils._backend.cpp import _nodes 8 | 9 | def test_OneDimensionalArray(): 10 | ODA = OneDimensionalArray 11 | A = ODA(int, 5, [1.0, 2, 3, 4, 5], init=6) 12 | A[1] = 2.0 13 | assert str(A) == '[1, 2, 3, 4, 5]' 14 | assert A 15 | assert ODA(int, [1.0, 2, 3, 4, 5], 5) 16 | assert ODA(int, 5) 17 | assert ODA(int, [1.0, 2, 3]) 18 | assert raises(IndexError, lambda: A[7]) 19 | assert raises(IndexError, lambda: A[-1]) 20 | assert raises(ValueError, lambda: ODA()) 21 | assert raises(ValueError, lambda: ODA(int, 1, 2, 3)) 22 | assert raises(TypeError, lambda: ODA(int, 5.0, set([1, 2, 3]))) 23 | assert raises(TypeError, lambda: ODA(int, 5.0)) 24 | assert raises(TypeError, lambda: ODA(int, set([1, 2, 3]))) 25 | assert raises(ValueError, lambda: ODA(int, 3, [1])) 26 | 27 | A = ODA(int, 5, [1, 2, 3, 4, 5], init=6, backend=Backend.CPP) 28 | A[1] = 2 29 | assert str(A) == "['1', '2', '3', '4', '5']" 30 | assert A 31 | assert ODA(int, [1, 2, 3, 4, 5], 5, backend=Backend.CPP) 32 | assert ODA(int, 5, backend=Backend.CPP) 33 | assert ODA(int, [1, 2, 3], backend=Backend.CPP) 34 | assert raises(TypeError, lambda: ODA(int, [1.0, 2, 3, 4, 5], 5, backend=Backend.CPP)) 35 | assert raises(TypeError, lambda: ODA(int, [1.0, 2, 3], backend=Backend.CPP)) 36 | assert raises(IndexError, lambda: A[7]) 37 | assert raises(IndexError, lambda: A[-1]) 38 | assert raises(ValueError, lambda: ODA(backend=Backend.CPP)) 39 | assert raises(ValueError, lambda: ODA(int, 1, 2, 3, backend=Backend.CPP)) 40 | assert raises(TypeError, lambda: ODA(int, 5.0, set([1, 2, 3]), backend=Backend.CPP)) 41 | assert 
raises(TypeError, lambda: ODA(int, 5.0, backend=Backend.CPP)) 42 | assert raises(TypeError, lambda: ODA(int, set([1, 2, 3]), backend=Backend.CPP)) 43 | assert raises(ValueError, lambda: ODA(int, 3, [1], backend=Backend.CPP)) 44 | assert raises(ValueError, lambda: ODA(int, 3, [1], backend=Backend.CPP)) 45 | assert raises(TypeError, lambda: A.fill(2.0)) 46 | 47 | 48 | def test_MultiDimensionalArray(): 49 | assert raises(ValueError, lambda: MultiDimensionalArray(int, 2, -1, 3)) 50 | assert MultiDimensionalArray(int, 10).shape == (10,) 51 | array = MultiDimensionalArray(int, 5, 9, 3, 8) 52 | assert array.shape == (5, 9, 3, 8) 53 | array.fill(5) 54 | array[1, 3, 2, 5] = 2.0 55 | assert array 56 | assert array[1, 3, 2, 5] == 2.0 57 | assert array[1, 3, 0, 5] == 5 58 | assert array[1, 2, 2, 5] == 5 59 | assert array[2, 3, 2, 5] == 5 60 | assert raises(IndexError, lambda: array[5]) 61 | assert raises(IndexError, lambda: array[4, 10]) 62 | assert raises(IndexError, lambda: array[-1]) 63 | assert raises(IndexError, lambda: array[2, 3, 2, 8]) 64 | assert raises(ValueError, lambda: MultiDimensionalArray()) 65 | assert raises(ValueError, lambda: MultiDimensionalArray(int)) 66 | assert raises(TypeError, lambda: MultiDimensionalArray(int, 5, 6, "")) 67 | array = MultiDimensionalArray(int, 3, 2, 2) 68 | array.fill(1) 69 | array[0, 0, 0] = 0 70 | array[0, 0, 1] = 0 71 | array[1, 0, 0] = 0 72 | array[2, 1, 1] = 0 73 | assert str(array) == '[0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0]' 74 | array = MultiDimensionalArray(int, 4) 75 | assert array.shape == (4,) 76 | array.fill(5) 77 | array[3] = 3 78 | assert array[3] == 3 79 | 80 | def test_DynamicOneDimensionalArray(): 81 | DODA = DynamicOneDimensionalArray 82 | A = DODA(int, 0) 83 | A.append(1) 84 | A.append(2) 85 | A.append(3) 86 | A.append(4) 87 | assert str(A) == "['1', '2', '3', '4']" 88 | A.delete(0) 89 | A.delete(0) 90 | A.delete(15) 91 | A.delete(-1) 92 | A.delete(1) 93 | A.delete(2) 94 | assert A._data == [4, None, None] 95 | assert 
str(A) == "['4']" 96 | assert A.size == 3 97 | A.fill(4) 98 | assert A._data == [4, 4, 4] 99 | b = DynamicOneDimensionalArray(int, 0) 100 | b.append(1) 101 | b.append(2) 102 | b.append(3) 103 | b.append(4) 104 | b.append(5) 105 | assert b._data == [1, 2, 3, 4, 5, None, None] 106 | assert list(reversed(b)) == [5, 4, 3, 2, 1] 107 | 108 | A = DODA(int, 0, backend=Backend.CPP) 109 | A.append(1) 110 | A.append(2) 111 | A.append(3) 112 | A.append(4) 113 | assert str(A) == "['1', '2', '3', '4']" 114 | A.delete(0) 115 | A.delete(0) 116 | A.delete(15) 117 | A.delete(-1) 118 | A.delete(1) 119 | A.delete(2) 120 | assert [A[i] for i in range(A.size)] == [4, None, None] 121 | assert A.size == 3 122 | A.fill(4) 123 | assert [A[0], A[1], A[2]] == [4, 4, 4] 124 | b = DODA(int, 0, backend=Backend.CPP) 125 | b.append(1) 126 | b.append(2) 127 | b.append(3) 128 | b.append(4) 129 | b.append(5) 130 | assert [b[i] for i in range(b.size)] == [1, 2, 3, 4, 5, None, None] 131 | 132 | def test_DynamicOneDimensionalArray2(): 133 | DODA = DynamicOneDimensionalArray 134 | root = TreeNode(1, 100) 135 | A = DODA(TreeNode, [root]) 136 | assert str(A[0]) == "(None, 1, 100, None)" 137 | 138 | def _test_ArrayForTrees(backend): 139 | AFT = ArrayForTrees 140 | root = TreeNode(1, 100,backend=backend) 141 | if backend==Backend.PYTHON: 142 | A = AFT(TreeNode, [root], backend=backend) 143 | B = AFT(TreeNode, 0, backend=backend) 144 | else: 145 | A = AFT(_nodes.TreeNode, [root], backend=backend) 146 | B = AFT(_nodes.TreeNode, 0, backend=backend) 147 | assert str(A) == "['(None, 1, 100, None)']" 148 | node = TreeNode(2, 200, backend=backend) 149 | A.append(node) 150 | assert str(A) == "['(None, 1, 100, None)', '(None, 2, 200, None)']" 151 | assert str(B) == "[]" 152 | 153 | def test_ArrayForTrees(): 154 | _test_ArrayForTrees(Backend.PYTHON) 155 | 156 | def test_cpp_ArrayForTrees(): 157 | _test_ArrayForTrees(Backend.CPP) 158 | -------------------------------------------------------------------------------- 
/pydatastructs/linear_data_structures/tests/benchmarks/test_arrays.py:
--------------------------------------------------------------------------------
# Micro-benchmarks comparing the C++ backend against the pure-Python backend
# (and, for append, a plain Python list). The top-level tests are marked
# xfail because wall-clock comparisons are inherently flaky on shared CI.
import random, timeit, functools, os, pytest
from pydatastructs import (OneDimensionalArray, Backend,
    DynamicOneDimensionalArray)

def _test_OneDimensionalArray_DynamicOneDimensionalArray(array_type):
    """Benchmark construction, indexing, fill and len of `array_type`,
    asserting the C++ backend is faster than the Python backend."""
    cpp = Backend.CPP
    repeat = 2
    number = 1000

    def _test___new__(size):
        # Time construction under the three supported argument shapes.

        def _common(dtype, *args, **kwargs):
            timer_python = timeit.Timer(functools.partial(array_type, dtype, *args, **kwargs))
            python_backend = min(timer_python.repeat(repeat, number))

            kwargs["backend"] = cpp
            timer_cpp = timeit.Timer(functools.partial(array_type, dtype, *args, **kwargs))
            cpp_backend = min(timer_cpp.repeat(repeat, number))

            assert cpp_backend < python_backend

        # Case 1: dtype, size
        _common(int, size)

        # Case 2: dtype, list
        data = [random.randint(0, 2 * size) for _ in range(size)]
        _common(int, data)

        # Case 3: dtype, size, init
        _common(float, size, init=random.random() * 2 * size)

    def _test___getitem_____setitem__(size, func):
        # Shared driver: accumulate per-index timings of `func` over both
        # backends and compare the totals.
        array_python = array_type(float, size)
        array_cpp = array_type(float, size, backend=cpp)
        python_backend = 0
        cpp_backend = 0
        for idx in range(size):
            timer_python = timeit.Timer(functools.partial(func, array_python, idx))
            timer_cpp = timeit.Timer(functools.partial(func, array_cpp, idx))
            python_backend += min(timer_python.repeat(repeat, number))
            cpp_backend += min(timer_cpp.repeat(repeat, number))

        assert cpp_backend < python_backend

    def _test___getitem__(size):
        def func(array, i):
            return array[i]
        _test___getitem_____setitem__(size, func)

    def _test___setitem__(size):
        def func(array, i):
            array[i] = random.random() * 2 * size
        _test___getitem_____setitem__(size, func)

    def _test_fill(size):
        # Time filling the whole array with a single value.
        array_python = array_type(float, size)
        array_cpp = array_type(float, size, backend=cpp)
        value = random.random()

        timer_python = timeit.Timer(functools.partial(array_python.fill, value))
        python_backend = min(timer_python.repeat(repeat, number))

        timer_cpp = timeit.Timer(functools.partial(array_cpp.fill, value))
        cpp_backend = min(timer_cpp.repeat(repeat, number))

        assert cpp_backend < python_backend

    def _test_len(size):
        # len() is cheap, so use 10x the usual iteration count.
        array_python = array_type(float, size)
        array_cpp = array_type(float, size, backend=cpp)

        timer_python = timeit.Timer(functools.partial(len, array_python))
        python_backend = min(timer_python.repeat(repeat, number * 10))

        timer_cpp = timeit.Timer(functools.partial(len, array_cpp))
        cpp_backend = min(timer_cpp.repeat(repeat, number * 10))

        assert cpp_backend < python_backend


    size = int(os.environ.get("PYDATASTRUCTS_BENCHMARK_SIZE", "1000"))
    _test___new__(size)
    _test___getitem__(size)
    _test___setitem__(size)
    _test_fill(size)
    _test_len(size)

@pytest.mark.xfail
def test_OneDimensionalArray():
    _test_OneDimensionalArray_DynamicOneDimensionalArray(OneDimensionalArray)

@pytest.mark.xfail
def test_DynamicOneDimensionalArray():
    _test_OneDimensionalArray_DynamicOneDimensionalArray(DynamicOneDimensionalArray)

repeat = 2
number = 1000

def _test_append(size):
    """Benchmark append of DODA (both backends) against list.append; the C++
    backend must beat the Python one and stay within 1.5x of a raw list."""
    data = [random.randint(-size, size) for _ in range(size)]
    doda_cpp = DynamicOneDimensionalArray(int, 0, backend=Backend.CPP)
    doda_python = DynamicOneDimensionalArray(int, 0)
    python_list = []

    cpp_backend, python_backend, list_time = (0, 0, 0)
    for datum in data:
        timer_cpp = timeit.Timer(functools.partial(doda_cpp.append, datum))
        cpp_backend += min(timer_cpp.repeat(repeat, number))

        timer_python = timeit.Timer(functools.partial(doda_python.append, datum))
        python_backend += min(timer_python.repeat(repeat, number))

        timer_list = timeit.Timer(functools.partial(python_list.append, datum))
        list_time += min(timer_list.repeat(repeat, number))

    assert cpp_backend < python_backend
    assert cpp_backend/list_time < 1.5

def _test_delete(size):
    """Benchmark random-order deletion of DODA (both backends) against `del`
    on a plain list (which must re-map indices as it shrinks)."""
    data = [random.random() * 2 * size for _ in range(size)]
    doda_cpp = DynamicOneDimensionalArray(float, data, backend=Backend.CPP)
    doda_python = DynamicOneDimensionalArray(float, data)
    python_list = [datum for datum in data]
    list_indices = [i for i in range(size)]
    random.seed(0)  # deterministic deletion order for the list baseline
    random.shuffle(list_indices)

    def _list_remove(obj, index):
        del obj[index]

    list_time = 0
    for i in range(size):
        idx = list_indices[i]

        timer_list = timeit.Timer(functools.partial(_list_remove, python_list, idx))
        list_time += min(timer_list.repeat(1, 1))

        # The list shrinks, so shift every pending index above `idx` down.
        for j in range(i + 1, size):
            if list_indices[j] > idx:
                list_indices[j] -= 1

    # DODA.delete may ignore already-empty slots, so loop until the array is
    # actually empty, re-shuffling the live index range each pass.
    cpp_backend, python_backend = (0, 0)
    while doda_cpp._num > 0:
        indices = [i for i in range(doda_cpp._last_pos_filled + 1)]
        random.shuffle(indices)
        for idx in indices:
            timer_cpp = timeit.Timer(functools.partial(doda_cpp.delete, idx))
            cpp_backend += min(timer_cpp.repeat(1, 1))

            timer_python = timeit.Timer(functools.partial(doda_python.delete, idx))
            python_backend += min(timer_python.repeat(1, 1))

            if doda_cpp._num == 0:
                break

    assert cpp_backend < python_backend
    assert cpp_backend < list_time

# NOTE(review): these run at import time (not wrapped in a test function) --
# presumably intentional for this benchmark module; confirm.
size = int(os.environ.get("PYDATASTRUCTS_BENCHMARK_SIZE", "1000"))
_test_append(size)
_test_delete(size * 4)
--------------------------------------------------------------------------------
/pydatastructs/trees/_backend/cpp/BinaryIndexedTree.hpp:
--------------------------------------------------------------------------------
#ifndef TREES_BINARYINDEXEDTREE_HPP
#define TREES_BINARYINDEXEDTREE_HPP

#define PY_SSIZE_T_CLEAN
// NOTE(review): the angle-bracket include targets below (and the template
// arguments of reinterpret_cast/static_cast in this file) appear to have been
// stripped by the text extraction; left as-is, confirm against upstream.
#include
#include
#include
#include "../../../utils/_backend/cpp/utils.hpp"
#include "../../../utils/_backend/cpp/TreeNode.hpp"
#include "../../../linear_data_structures/_backend/cpp/arrays/ArrayForTrees.hpp"
#include "../../../linear_data_structures/_backend/cpp/arrays/DynamicOneDimensionalArray.hpp"

// Copied binary trees and changed the name to BinaryIndexedTree
// Start from the struct

// Fenwick (binary indexed) tree over the values held in `array`.
// `tree` is a 1-based Python list of partial sums; `flag[i]` records whether
// slot i has ever been written (0/1), so update() knows whether to add the
// value or replace the previous one.
typedef struct {
    PyObject_HEAD
    OneDimensionalArray* array;
    PyObject* tree;
    PyObject* flag;
} BinaryIndexedTree;

// NOTE(review): dealloc frees the struct without releasing array/tree/flag --
// presumably leaks them; confirm ownership model before changing.
static void BinaryIndexedTree_dealloc(BinaryIndexedTree *self) {
    Py_TYPE(self)->tp_free(reinterpret_cast(self));
}

// update(index, value): set array[index] = value and propagate the delta up
// the tree via the standard index += index & (-index) walk.
static PyObject* BinaryIndexedTree_update(BinaryIndexedTree* self, PyObject *args) {
    long index = PyLong_AsLong(PyObject_GetItem(args, PyZero));
    long value = PyLong_AsLong(PyObject_GetItem(args, PyOne));
    long _index = index;
    long _value = value;
    // NOTE(review): identity comparison against the shared PyZero singleton;
    // works only because the same object was stored into `flag` below and
    // CPython caches small ints -- confirm this is intended.
    if (PyList_GetItem(self->flag, index) == PyZero) {
        // First write to this slot: add the full value.
        // NOTE(review): PyList_SetItem steals a reference, but PyOne is a
        // shared singleton that is not INCREF'd here -- refcount hazard.
        PyList_SetItem(self->flag, index, PyOne);
        index += 1;
        while (index < self->array->_size + 1) {
            long curr = PyLong_AsLong(PyList_GetItem(self->tree, index));
            PyList_SetItem(self->tree, index, PyLong_FromLong(curr + value));
            index = index + (index & (-1*index));
        }
    }
    else {
        // Slot already holds a value: propagate only the difference.
        value = value - PyLong_AsLong(self->array->_data[index]);
        index += 1;
        while (index < self->array->_size + 1) {
            long curr = PyLong_AsLong(PyList_GetItem(self->tree, index));
            PyList_SetItem(self->tree, index, PyLong_FromLong(curr + value));
            index = index + (index & (-1*index));
        }
    }
    self->array->_data[_index] = PyLong_FromLong(_value);
    Py_RETURN_NONE;
}

// Constructor: builds the backing OneDimensionalArray from the Python args,
// zero-initialises tree/flag, then replays every element through update().
static PyObject* BinaryIndexedTree___new__(PyTypeObject* type, PyObject *args, PyObject *kwds) {
    BinaryIndexedTree *self;
    self = reinterpret_cast(type->tp_alloc(type, 0));

    // Python code is such that arguments are: type(array[0]) and array

    if (PyType_Ready(&OneDimensionalArrayType) < 0) { // This has to be present to finalize a type object. This should be called on all type objects to finish their initialization.
        return NULL;
    }
    PyObject* _one_dimensional_array = OneDimensionalArray___new__(&OneDimensionalArrayType, args, kwds);
    if ( !_one_dimensional_array ) {
        return NULL;
    }
    self->array = reinterpret_cast(_one_dimensional_array);
    // tree is sized _size + 2 so 1-based Fenwick indexing never overruns.
    self->tree = PyList_New(self->array->_size+2);
    for(int i=0;i<self->array->_size+2;i++) {
        // NOTE(review): stores the borrowed PyZero singleton via the
        // reference-stealing PyList_SetItem without INCREF -- refcount hazard.
        PyList_SetItem(self->tree, i, PyZero);
    }
    self->flag = PyList_New(self->array->_size);
    for(int i=0;i<self->array->_size;i++) {
        PyList_SetItem(self->flag, i, PyZero);
        BinaryIndexedTree_update(self, Py_BuildValue("(OO)", PyLong_FromLong(i), self->array->_data[i]));
    }

    return reinterpret_cast(self);
}

// get_prefix_sum(index): sum of array[0..index] via the downward
// index -= index & (-index) walk over `tree`.
static PyObject* BinaryIndexedTree_get_prefix_sum(BinaryIndexedTree* self, PyObject *args) {
    long index = PyLong_AsLong(PyObject_GetItem(args, PyZero));
    index += 1;
    long sum = 0;
    while (index > 0) {
        sum += PyLong_AsLong(PyList_GetItem(self->tree, index));
        index = index - (index & (-1*index));
    }

    return PyLong_FromLong(sum);
}

// get_sum(left, right): inclusive range sum as the difference of two
// prefix sums (or a single prefix sum when left == 0).
static PyObject* BinaryIndexedTree_get_sum(BinaryIndexedTree* self, PyObject *args) {
    long left_index = PyLong_AsLong(PyObject_GetItem(args, PyZero));
    long right_index = PyLong_AsLong(PyObject_GetItem(args, PyOne));
    if (left_index >= 1) {
        long l1 = PyLong_AsLong(BinaryIndexedTree_get_prefix_sum(self, Py_BuildValue("(O)", PyLong_FromLong(right_index))));
        long l2 = PyLong_AsLong(BinaryIndexedTree_get_prefix_sum(self, Py_BuildValue("(O)", PyLong_FromLong(left_index - 1))));
        return PyLong_FromLong(l1 - l2);
    }
    else {
        return BinaryIndexedTree_get_prefix_sum(self, Py_BuildValue("(O)", PyLong_FromLong(right_index)));
    }
}


static struct PyMethodDef BinaryIndexedTree_PyMethodDef[] = {
    {"update", (PyCFunction) BinaryIndexedTree_update, METH_VARARGS, NULL},
    {"get_prefix_sum", (PyCFunction) BinaryIndexedTree_get_prefix_sum, METH_VARARGS, NULL},
    {"get_sum", (PyCFunction) BinaryIndexedTree_get_sum, METH_VARARGS, NULL},
    {NULL}
};

static PyMemberDef BinaryIndexedTree_PyMemberDef[] = {
    {"array", T_OBJECT_EX, offsetof(BinaryIndexedTree, array), 0, "array"},
    {"tree", T_OBJECT_EX, offsetof(BinaryIndexedTree, tree), 0, "tree"},
    {"flag", T_OBJECT_EX, offsetof(BinaryIndexedTree, flag), 0, "flag"},
    {NULL}  /* Sentinel */
};


static PyTypeObject BinaryIndexedTreeType = {
    /* tp_name */ PyVarObject_HEAD_INIT(NULL, 0) "BinaryIndexedTree",
    /* tp_basicsize */ sizeof(BinaryIndexedTree),
    /* tp_itemsize */ 0,
    /* tp_dealloc */ (destructor) BinaryIndexedTree_dealloc,
    /* tp_print */ 0,
    /* tp_getattr */ 0,
    /* tp_setattr */ 0,
    /* tp_reserved */ 0,
    /* tp_repr */ 0,
    /* tp_as_number */ 0,
    /* tp_as_sequence */ 0,
    /* tp_as_mapping */ 0,
    /* tp_hash */ 0,
    /* tp_call */ 0,
    /* tp_str */ 0,
    /* tp_getattro */ 0,
    /* tp_setattro */ 0,
    /* tp_as_buffer */ 0,
    /* tp_flags */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
    /* tp_doc */ 0,
    /* tp_traverse */ 0,
    /* tp_clear */ 0,
    /* tp_richcompare */ 0,
    /* tp_weaklistoffset */ 0,
    /* tp_iter */ 0,
    /* tp_iternext */ 0,
    /* tp_methods */ BinaryIndexedTree_PyMethodDef,
    /* tp_members */ BinaryIndexedTree_PyMemberDef,
    /* tp_getset */ 0,
    /* tp_base */ &PyBaseObject_Type,
    /* tp_dict */ 0,
    /* tp_descr_get */ 0,
    /* tp_descr_set */ 0,
    /* tp_dictoffset */ 0,
    /* tp_init */ 0,
    /* tp_alloc */ 0,
    /* tp_new */ BinaryIndexedTree___new__,
};

#endif
--------------------------------------------------------------------------------