├── .gitignore ├── Advanced_Algorithms_Design └── Dynamic_programming │ ├── MatrixChainMultipication.py │ └── rod_cutting_problem.py ├── Augmenting_Data_Structures ├── IntervalTree │ ├── IntervalTree.py │ └── Interval_tree_in_practice.py └── RedBlackTree_size.py ├── Convolution └── Naive │ └── Naive_Convolution.cu ├── Graph_algorithms ├── AllPairsShortestPaths │ ├── FloydWarshall.py │ ├── Johnson.py │ └── ReadME.md ├── Elementary-Algorithms │ ├── BFS │ │ ├── BFS.py │ │ ├── Bipartite.py │ │ └── treediameter_shortestpath.py │ ├── DFS │ │ ├── DFS.py │ │ ├── SCC.py │ │ └── TopologicalSort.py │ ├── intro.md │ └── rep.py ├── MinimumSpanningTree │ ├── kruskal.py │ ├── prim.py │ └── prim_fibonacci.py └── Single-Source-Shortest-Path │ ├── BellmanFord.py │ └── Dijkstra.py ├── LICENSE ├── Medians-and-Order-Statistics ├── Selection_in_expected_linear_time.py └── Selection_in_worst_case_linear_time.py ├── README.md ├── Sorts_Algorithms ├── README.md ├── Sort_comparisons.py └── bucket_sort.py ├── Trees ├── AVLTree │ ├── AVLTree.py │ ├── GUI.py │ ├── main.py │ └── utils.py ├── BTrees │ └── BPlussTree.py └── RedBlackTree │ ├── README.md │ └── RedBlackTree.py └── pi_estimation.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # UV 98 | # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | #uv.lock 102 | 103 | # poetry 104 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 105 | # This is especially recommended for binary packages to ensure reproducibility, and is more 106 | # commonly ignored for libraries. 
107 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 108 | #poetry.lock 109 | 110 | # pdm 111 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 112 | #pdm.lock 113 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 114 | # in version control. 115 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control 116 | .pdm.toml 117 | .pdm-python 118 | .pdm-build/ 119 | 120 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 121 | __pypackages__/ 122 | 123 | # Celery stuff 124 | celerybeat-schedule 125 | celerybeat.pid 126 | 127 | # SageMath parsed files 128 | *.sage.py 129 | 130 | # Environments 131 | .env 132 | .venv 133 | env/ 134 | venv/ 135 | ENV/ 136 | env.bak/ 137 | venv.bak/ 138 | 139 | # Spyder project settings 140 | .spyderproject 141 | .spyproject 142 | 143 | # Rope project settings 144 | .ropeproject 145 | 146 | # mkdocs documentation 147 | /site 148 | 149 | # mypy 150 | .mypy_cache/ 151 | .dmypy.json 152 | dmypy.json 153 | 154 | # Pyre type checker 155 | .pyre/ 156 | 157 | # pytype static type analyzer 158 | .pytype/ 159 | 160 | # Cython debug symbols 161 | cython_debug/ 162 | 163 | # PyCharm 164 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 165 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 166 | # and can be added to the global gitignore or merged into this file. For a more nuclear 167 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
def matrix_chain_order(dims):
    """
    Compute the minimum number of scalar multiplications needed
    to multiply a chain of matrices.

    Recurrence: m[i, j] = min over k of m[i, k] + m[k+1, j] + p_{i-1} * p_k * p_j.

    Args:
        dims (list): Dimensions such that the i-th matrix has dimensions
            dims[i-1] x dims[i]; a chain of n matrices needs n + 1 entries.

    Returns:
        tuple: A tuple containing:
            - min_cost (int): Minimum number of scalar multiplications
              (0 when the chain has fewer than two matrices).
            - s (list): Split table used to reconstruct the optimal
              parenthesization (empty for a degenerate chain).
    """
    n = len(dims) - 1  # number of matrices in the chain
    if n <= 0:
        # Fewer than two dimension entries: nothing to multiply.
        # (The original code raised IndexError on m[0][-1] here.)
        return 0, []
    m = [[0] * n for _ in range(n)]  # m[i][j]: min cost to multiply A_i..A_j
    s = [[0] * n for _ in range(n)]  # s[i][j]: split index k achieving m[i][j]

    # length = number of matrices in the sub-chain being multiplied
    # (length 2 means pairs, 3 means triplets, and so on).
    for length in range(2, n + 1):
        for i in range(n - length + 1):
            j = i + length - 1
            m[i][j] = float('inf')  # initialize to infinity
            # Test all possible places to split the product.
            for k in range(i, j):
                cost = m[i][k] + m[k + 1][j] + dims[i] * dims[k + 1] * dims[j + 1]
                if cost < m[i][j]:
                    m[i][j] = cost
                    s[i][j] = k

    return m[0][n - 1], s


def print_optimal_parens(s, i, j):
    """
    Utility to build the optimal parenthesization string.

    Args:
        s (list): Split table produced by matrix_chain_order.
        i (int): Starting index of the chain (0-based).
        j (int): Ending index of the chain (0-based).

    Returns:
        str: Optimal parenthesization, e.g. "((M1 x M2) x M3)".
    """
    if i == j:
        return f"M{i + 1}"
    else:
        return f"({print_optimal_parens(s, i, s[i][j])} x {print_optimal_parens(s, s[i][j] + 1, j)})"


# Example usage
if __name__ == "__main__":
    # Dimensions of matrices: M1 = 20x40, M2 = 40x30, M3 = 30x40, M4 = 40x50
    # (dims[i-1] x dims[i] for the i-th matrix).
    dims = [20, 40, 30, 40, 50]
    min_cost, s = matrix_chain_order(dims)
    print("Minimum number of scalar multiplications:", min_cost)
    print("Optimal parenthesization:", print_optimal_parens(s, 0, len(dims) - 2))


import time  # Measure execution time of each rod-cutting implementation
from typing import List, Tuple

"""
Given a rod of length n inches and a table of prices p_i for i = 1, 2, ..., n,
determine the maximum revenue r_n obtainable by cutting up the rod and
selling the pieces.  If the price p_n for a rod of length n is large enough,
an optimal solution might require no cutting at all.
"""

# The naive recursion gets very slow as n grows (noticeable past ~35) because
# it recursively calls itself with the same parameters over and over.
# Recursion-tree analysis: T(n) = O(2^n).
# Dynamic programming (below) is needed for acceptable speed.
def CUT_ROD(P: List[int], n: int) -> int:
    """Naive exponential rod cutting: max revenue for a rod of length n.

    Args:
        P: P[i-1] is the price of a piece of length i.
        n: Rod length (0 or an empty price table yields revenue 0).
    """
    if n == 0 or len(P) == 0:
        return 0
    q = float('-inf')
    for i in range(1, n + 1):
        q = max(q, P[i - 1] + CUT_ROD(P, n - i))
    return q


# Saving subproblem solutions comes with a cost: the additional memory needed
# to store solutions.  Dynamic programming thus serves as an example of a
# time-memory trade-off.  Both DP versions run in T(n) = O(n^2).
# We can use two approaches:
# 1.
# top-down with memoization
# 2. bottom-up method
# In the top-down approach you write the procedure recursively in a natural
# manner, but modified to save the result of each subproblem (usually in an
# array or hash table).
# These two approaches yield algorithms with the same asymptotic running
# time, except in unusual circumstances where the top-down approach does not
# actually recurse to examine all possible subproblems.
# The bottom-up approach often has much better constant factors, since it
# has lower overhead for procedure calls.

def Memoized_CUT_ROD_AUX(p: List[int], n: int, r: list):
    """Top-down helper: max revenue for length n, memoized in r.

    r[k] holds float('-inf') while length k is unsolved.  The unsolved test
    is `!= float('-inf')` rather than the previous `>= 0`, which silently
    fell back to exponential recomputation whenever prices are negative.
    """
    if r[n] != float('-inf'):  # check if there is already a found solution
        return r[n]
    if n == 0:
        q = 0
    else:
        q = float('-inf')
        for i in range(1, n + 1):
            q = max(q, p[i - 1] + Memoized_CUT_ROD_AUX(p, n - i, r))
    r[n] = q  # remember the solution value for length n
    return q


def Memoized_CUT_ROD(p: List[int], n: int):
    """Top-down (memoized) rod cutting: O(n^2) time, O(n) extra space."""
    r = [float('-inf')] * (n + 1)
    return Memoized_CUT_ROD_AUX(p, n, r)


def BOTTOM_UP_CUT_ROD(p: List[int], n: int) -> int:
    """Bottom-up rod cutting: O(n^2) time; returns max revenue for length n."""
    r = [0] * (n + 1)  # r[0] = 0: a rod of length 0 earns nothing
    for j in range(1, n + 1):
        q = float('-inf')
        for i in range(1, j + 1):
            q = max(q, p[i - 1] + r[j - i])
        r[j] = q
    return r[n]


def EXTENDED_BOTTOM_UP_CUT_ROD(p: List[int], n: int) -> Tuple[List[int], List[int]]:
    """Bottom-up rod cutting that also records the cuts.

    Returns:
        (r, s) where r[j-1] is the max revenue for length j, and s[j-1] is
        the size of the first piece to cut off a rod of length j.
    """
    r = [0] * (n + 1)
    s = [0] * n
    for j in range(1, n + 1):
        q = float('-inf')
        for i in range(1, j + 1):
            if q < p[i - 1] + r[j - i]:
                q = p[i - 1] + r[j - i]
                s[j - 1] = i  # first-cut size for length j
        r[j] = q
    return r[1:], s


if __name__ == "__main__":
    prices = [1, 5, 8, 9, 10, 17, 17, 20, 22, 25, 26, 28, 30, 32, 34, 56, 34, 50, 59, 66]  # Example prices for rod lengths
    n = len(prices)  # Length of the rod

    # Measure time for CUT_ROD (exponential: slow for this n)
    start_time = time.perf_counter()
    result = CUT_ROD(prices, n)
    end_time = time.perf_counter()
    print(f"Maximum revenue for cutting the rod is: {result}")
    print(f"Time taken by CUT_ROD: {end_time - start_time:.6f} seconds\n")

    # Measure time for Memoized_CUT_ROD
    start_time = time.perf_counter()
    result = Memoized_CUT_ROD(prices, n)
    end_time = time.perf_counter()
    print(f"Maximum revenue for cutting the rod (Memoized): {result}")
    print(f"Time taken by Memoized_CUT_ROD: {end_time - start_time:.6f} seconds\n")

    # Measure time for BOTTOM_UP_CUT_ROD
    start_time = time.perf_counter()
    result = BOTTOM_UP_CUT_ROD(prices, n)
    end_time = time.perf_counter()
    print(f"Maximum revenue for cutting the rod (Bottom-Up): {result}")
    print(f"Time taken by BOTTOM_UP_CUT_ROD: {end_time - start_time:.6f} seconds\n")

    start_time = time.perf_counter()
    result = EXTENDED_BOTTOM_UP_CUT_ROD(prices, n)
    end_time = time.perf_counter()
    print(f"Maximum revenue for cutting the rod (Extended Bottom Up): {result}")
    print(f"time taken by (Extended Bottom Up): {end_time - start_time:.6f}")


import tkinter as tk
from tkinter import messagebox
from math import cos, sin, radians  # NOTE(review): unused in the visible code — verify before removing


def floats_are_equal(a, b, eps=1e-3):
    """Absolute-tolerance comparison for interval endpoints."""
    return abs(a - b) < eps


class Interval:
    """A closed interval [low, high] with an optional payload."""

    def __init__(self, low, high, data=None):
        self.low = low
        self.high = high
        self.data = data

    def __repr__(self):
        return f"[{self.low}, {self.high}]"


class Node:
    """Interval-tree node, keyed on interval.low."""

    def __init__(self, interval):
        self.interval = interval
        self.max = interval.high  # max high endpoint in this node's subtree
        self.left = None
        self.right = None
        self.parent = None
        self.size = 1  # subtree size (order-statistic augmentation)
        self.color = 'BLACK'  # red-black color attribute


class IntervalTree:
    """Red-black interval tree augmented with subtree `max` and `size`."""
    def __init__(self):
        # Empty tree: no sentinel NIL node is used; absent children are None.
        self.root = None

    def _update_size(self, node):
        """Recompute node.size from its children (no-op for None)."""
        if node is None:
            return
        node.size = (node.left.size if node.left else 0) + (node.right.size if node.right else 0) + 1

    def _update_max(self, node):
        """Recompute node.max = max high endpoint over this subtree."""
        if node is None:
            return
        node.max = max(node.interval.high,
                       node.left.max if node.left else node.interval.high,
                       node.right.max if node.right else node.interval.high
                       )

    def _transplant(self, u, v):
        """Replace subtree rooted at u with subtree rooted at v (v may be None)."""
        if u.parent is None:
            self.root = v
        elif u == u.parent.left:
            u.parent.left = v
        else:
            u.parent.right = v
        if v is not None:
            v.parent = u.parent

        # Re-derive size/max on every ancestor of the spliced-in subtree.
        current = v
        while current:
            self._update_size(current)
            self._update_max(current)
            current = current.parent

    def _minimum(self, node):
        """Leftmost (lowest-key) node in the subtree rooted at node."""
        while node.left:
            node = node.left
        return node

    def _left_rotate(self, x):
        """Standard BST left rotation around x, then refresh size/max
        bottom-up (x first, then its new parent y)."""
        y = x.right
        x.right = y.left
        if y.left:
            y.left.parent = x
        y.parent = x.parent
        if not x.parent:
            self.root = y
        elif x == x.parent.left:
            x.parent.left = y
        else:
            x.parent.right = y
        y.left = x
        x.parent = y

        self._update_size(x)
        self._update_max(x)
        self._update_size(y)
        self._update_max(y)

    def _right_rotate(self, y):
        """Mirror image of _left_rotate."""
        x = y.left
        y.left = x.right
        if x.right:
            x.right.parent = y
        x.parent = y.parent
        if not y.parent:
            self.root = x
        elif y == y.parent.left:
            y.parent.left = x
        else:
            y.parent.right = x
        x.right = y
        y.parent = x

        self._update_size(y)
        self._update_max(y)
        self._update_size(x)
        self._update_max(x)

    def _update_ancestor_size_on_insert(self, node):
        """Refresh size/max on every ancestor after a leaf insertion."""
        current = node.parent
        while current:
            self._update_size(current)
            self._update_max(current)
            current = current.parent

    def _fix_insert(self, z):
        """Standard red-black insert fixup (CLRS): recolor and rotate until
        no red node has a red parent, then blacken the root."""
        z.color = 'RED'
        while z != self.root and z.parent.color == 'RED':
            if z.parent == z.parent.parent.left:
                y = z.parent.parent.right  # uncle
                if y and y.color == 'RED':
                    # Case 1: red uncle — recolor and move the violation up.
                    z.parent.color = 'BLACK'
                    y.color = 'BLACK'
                    z.parent.parent.color = 'RED'
                    z = z.parent.parent
                else:
                    if z == z.parent.right:
                        # Case 2: inner child — rotate into case 3.
                        z = z.parent
                        self._left_rotate(z)
                    # Case 3: outer child — recolor and rotate grandparent.
                    z.parent.color = 'BLACK'
                    z.parent.parent.color = 'RED'
                    self._right_rotate(z.parent.parent)
            else:
                # Symmetric: parent is a right child.
                y = z.parent.parent.left
                if y and y.color == 'RED':
                    z.parent.color = 'BLACK'
                    y.color = 'BLACK'
                    z.parent.parent.color = 'RED'
                    z = z.parent.parent
                else:
                    if z == z.parent.left:
                        z = z.parent
                        self._right_rotate(z)
                    z.parent.color = 'BLACK'
                    z.parent.parent.color = 'RED'
                    self._left_rotate(z.parent.parent)
        self.root.color = 'BLACK'

    def insert(self, interval):
        """Insert an Interval, keyed on interval.low; duplicates go right."""
        new_node = Node(interval)
        if self.root is None:
            self.root = new_node
        else:
            curr = self.root
            parent = None
            while curr:
                parent = curr
                if interval.low < curr.interval.low:
                    curr = curr.left
                else:
                    curr = curr.right
            new_node.parent = parent
            if interval.low < parent.interval.low:
                parent.left = new_node
            else:
                parent.right = new_node

        self._update_ancestor_size_on_insert(new_node)
        self._fix_insert(new_node)

    def _fix_delete(self, x):
        # NOTE(review): this is NOT the standard red-black delete fixup — it
        # compares and overwrites cached `max` values and rotates based on
        # them, rather than restoring black-height invariants.  Intent is
        # unclear from the code alone; verify against the original design
        # before restructuring.  Left byte-for-byte as found.
        while x != self.root and (x.max < (x.parent.max if x.parent else 0)):
            if x == x.parent.left:
                s = x.parent.right  # sibling
                if s and (s.max >= (x.parent.max if x.parent else 0)):
                    break
                if s and s.max < (x.parent.max if x.parent else 0):
                    if s.right and s.right.max >= (x.parent.max if x.parent else 0):
                        s.max = x.parent.max if x.parent else 0
                        self._left_rotate(x.parent)
                        x = self.root
                    else:
                        if s.left and s.left.max >= (x.parent.max if x.parent else 0):
                            self._right_rotate(s)
                            s = x.parent.right

                        s.max = x.parent.max if x.parent else 0
                        self._left_rotate(x.parent)
                        x = self.root

            else:
                s = x.parent.left  # sibling (mirror case)
                if s and s.max >= (x.parent.max if x.parent else 0):
                    break
                if s and s.max < (x.parent.max if x.parent else 0):
                    if s.left and s.left.max >= (x.parent.max if x.parent else 0):
                        s.max = x.parent.max if x.parent else 0
                        self._right_rotate(x.parent)
                        x = self.root
                    else:
                        if s.right and s.right.max >= (x.parent.max if x.parent else 0):
                            self._left_rotate(s)
                            s = x.parent.left

                        s.max = x.parent.max if x.parent else 0
                        self._right_rotate(x.parent)
                        x = self.root
        # Final patch-up of the root's cached max.
        if x.max < self.root.max:
            self.root.max = self.root.right.max if self.root.right else self.root.interval.high

    def _delete_fix_color(self, node):
        # Blacken the replacement node (simplified stand-in for the full
        # red-black delete recoloring).
        if node:
            node.color = 'BLACK'

    def delete(self, interval):
        """Delete the node whose endpoints match `interval` (within eps).

        Prints a message and returns silently if no match is found.
        """
        node = self.root
        z = None
        while node:
            if floats_are_equal(node.interval.low, interval.low) and floats_are_equal(node.interval.high, interval.high):
                z = node
                break
            if interval.low < node.interval.low:
                node = node.left
            else:
                node = node.right

        if z is None:
            print("Couldn't find the interval in tree")
            return

        # Standard BST deletion: splice out z, or its successor when z has
        # two children; x is the node that moves into the vacated position.
        if z.left is None:
            x = z.right
            self._transplant(z, z.right)
        elif z.right is None:
            x = z.left
            self._transplant(z, z.left)
        else:
            y = self._minimum(z.right)  # successor
            x = y.right
            if y.parent != z:
                self._transplant(y, y.right)
                y.right = z.right
                y.right.parent = y
            self._transplant(z, y)
            y.left = z.left
            y.left.parent = y
        if x:
            self._fix_delete(x)
        self._delete_fix_color(x)

    def search(self, interval):
        """Exact-match search (within eps); returns the Node or None."""
        curr = self.root
        while curr:
            if floats_are_equal(curr.interval.low, interval.low) and floats_are_equal(curr.interval.high, interval.high):
                return curr
            if interval.low < curr.interval.low:
                curr = curr.left
            else:
                curr = curr.right
        return None

    def overlap_search(self, interval):
        """Classic interval search: returns SOME node overlapping `interval`,
        or None.  Descends left only when the left subtree's max can still
        reach interval.low."""
        node = self.root
        while node:
            if self._overlap(node.interval, interval):
                return node
            if node.left and node.left.max >= interval.low:
                node = node.left
            else:
                node = node.right
        return None

    def _overlap(self, interval_a, interval_b):
        """Closed-interval overlap test."""
        return interval_a.low <= interval_b.high and interval_b.low <= interval_a.high


class IntervalTreeGUI:
    def __init__(self):
        # Tkinter front end: a drawing canvas plus one row of controls per
        # operation (insert / delete / search / overlap search).
        self.tree = IntervalTree()
        self.window = tk.Tk()
        self.window.title("Interval Tree GUI")
        self.canvas = tk.Canvas(self.window, width=800, height=600, bg="white")
        self.canvas.pack()

        # Input controls
        self.control_frame = tk.Frame(self.window)
        self.control_frame.pack()

        # Insert Controls
        self.insert_label = tk.Label(self.control_frame, text="Insert Interval (Low, High):")
        self.insert_label.grid(row=0, column=0)
        self.insert_low_entry = tk.Entry(self.control_frame)
        self.insert_low_entry.grid(row=0, column=1)
        self.insert_high_entry = tk.Entry(self.control_frame)
        self.insert_high_entry.grid(row=0, column=2)
        self.insert_button = tk.Button(self.control_frame, text="Insert", command=self.insert_interval)
        self.insert_button.grid(row=0, column=3)

        # Delete Controls
        self.delete_label = tk.Label(self.control_frame, text="Delete Interval (Low, High):")
        self.delete_label.grid(row=1, column=0)
        self.delete_low_entry = tk.Entry(self.control_frame)
        self.delete_low_entry.grid(row=1, column=1)
        self.delete_high_entry = tk.Entry(self.control_frame)
        self.delete_high_entry.grid(row=1, column=2)
self.delete_button = tk.Button(self.control_frame, text="Delete", command=self.delete_interval) 306 | self.delete_button.grid(row=1, column=3) 307 | 308 | # Search Controls 309 | self.search_label = tk.Label(self.control_frame, text="Search Interval (Low, High):") 310 | self.search_label.grid(row=2, column = 0) 311 | self.search_low_entry = tk.Entry(self.control_frame) 312 | self.search_low_entry.grid(row = 2, column = 1) 313 | self.search_high_entry = tk.Entry(self.control_frame) 314 | self.search_high_entry.grid(row = 2, column = 2) 315 | self.search_button = tk.Button(self.control_frame, text = "Search", command = self.search_interval) 316 | self.search_button.grid(row = 2, column = 3) 317 | 318 | # Overlap Search Controls 319 | self.overlap_label = tk.Label(self.control_frame, text = "Overlap Search (Low, High):") 320 | self.overlap_label.grid(row = 3, column = 0) 321 | self.overlap_low_entry = tk.Entry(self.control_frame) 322 | self.overlap_low_entry.grid(row = 3, column = 1) 323 | self.overlap_high_entry = tk.Entry(self.control_frame) 324 | self.overlap_high_entry.grid(row = 3, column = 2) 325 | self.overlap_button = tk.Button(self.control_frame, text = "Overlap Search", command = self.overlap_search) 326 | self.overlap_button.grid(row = 3, column = 3) 327 | 328 | def insert_interval(self): 329 | low_str = self.insert_low_entry.get().strip() 330 | high_str = self.insert_high_entry.get().strip() 331 | if not low_str or not high_str: 332 | messagebox.showerror("Error", "Please enter both low and high values.") 333 | return 334 | try: 335 | low = float(low_str) 336 | high = float(high_str) 337 | self.tree.insert(Interval(low, high)) 338 | self.insert_low_entry.delete(0, tk.END) 339 | self.insert_high_entry.delete(0, tk.END) 340 | self.draw_tree() 341 | except ValueError: 342 | messagebox.showerror("Error", "Please enter valid numbers for low and high.") 343 | 344 | def delete_interval(self): 345 | low_str = self.delete_low_entry.get().strip() 346 | high_str = 
self.delete_high_entry.get().strip() 347 | if not low_str or not high_str: 348 | messagebox.showerror("Error", "Please enter both low and high values.") 349 | return 350 | try: 351 | low = float(low_str) 352 | high = float(high_str) 353 | self.tree.delete(Interval(low, high)) 354 | self.delete_low_entry.delete(0, tk.END) 355 | self.delete_high_entry.delete(0, tk.END) 356 | self.draw_tree() 357 | except ValueError: 358 | messagebox.showerror("Error", "Please enter valid numbers for low and high.") 359 | 360 | def search_interval(self): 361 | low_str = self.search_low_entry.get().strip() 362 | high_str = self.search_high_entry.get().strip() 363 | if not low_str or not high_str: 364 | messagebox.showerror("Error", "Please enter both low and high values.") 365 | return 366 | try: 367 | low = float(low_str) 368 | high = float(high_str) 369 | result = self.tree.search(Interval(low,high)) 370 | if result: 371 | messagebox.showinfo("Search Result", f"Found interval: {result.interval}") 372 | else: 373 | messagebox.showinfo("Search Result", "Interval not found.") 374 | except ValueError: 375 | messagebox.showerror("Error", "Please enter valid numbers for low and high.") 376 | 377 | def overlap_search(self): 378 | low_str = self.overlap_low_entry.get().strip() 379 | high_str = self.overlap_high_entry.get().strip() 380 | if not low_str or not high_str: 381 | messagebox.showerror("Error", "Please enter both low and high values.") 382 | return 383 | try: 384 | low = float(low_str) 385 | high = float(high_str) 386 | result = self.tree.overlap_search(Interval(low, high)) 387 | if result: 388 | messagebox.showinfo("Overlap Search Result", f"Found overlapping interval: {result.interval}") 389 | else: 390 | messagebox.showinfo("Overlap Search Result", "No overlapping interval found.") 391 | except ValueError: 392 | messagebox.showerror("Error", "Please enter valid numbers for low and high") 393 | 394 | def draw_tree(self): 395 | self.canvas.delete("all") 396 | if self.tree.root: 397 
| self._draw_tree(self.tree.root, 400, 50, 150) 398 | 399 | def _draw_tree(self, node, x, y, x_offset): 400 | if node.left: 401 | self.canvas.create_line(x, y, x - x_offset, y + 80) 402 | self._draw_tree(node.left, x - x_offset, y + 80, x_offset // 2) 403 | if node.right: 404 | self.canvas.create_line(x, y, x + x_offset, y + 80) 405 | self._draw_tree(node.right, x + x_offset, y + 80, x_offset // 2) 406 | 407 | label = f"[{node.interval.low}, {node.interval.high}]\nmax={node.max}\nsize={node.size}" 408 | 409 | fill_color = "red" if node.color == 'RED' else 'lightblue' 410 | self.canvas.create_oval(x - 30, y - 30, x + 30, y + 30, fill=fill_color) 411 | self.canvas.create_text(x, y, text=label) 412 | 413 | def run(self): 414 | self.window.mainloop() 415 | 416 | if __name__ == "__main__": 417 | gui = IntervalTreeGUI() 418 | gui.run() -------------------------------------------------------------------------------- /Augmenting_Data_Structures/IntervalTree/Interval_tree_in_practice.py: -------------------------------------------------------------------------------- 1 | # a simple scheduler 2 | # use the following command: 3 | # pip install intervaltree 4 | 5 | import argparse 6 | from intervaltree import Interval, IntervalTree 7 | 8 | class Task: 9 | """Represents a scheduled task.""" 10 | def __init__(self, task_id, start_time, end_time, data=None): 11 | self.id = task_id 12 | self.start = start_time 13 | self.end = end_time 14 | self.data = data # Optional 15 | 16 | def __repr__(self): 17 | return f"Task(id={self.id}, start={self.start}, end={self.end})" 18 | 19 | class IntervalScheduler: 20 | def __init__(self): 21 | self.interval_tree = IntervalTree() 22 | self.task_map = {} # To quickly access Task objects by their IDs. 23 | 24 | def schedule(self, task_id, start_time, end_time, data=None): 25 | """Schedules a new task. 
        Returns True if scheduled, False if conflicts exist"""
        if self.is_conflicting(start_time, end_time):
            return False  # Or handle conflict resolution

        new_task = Task(task_id, start_time, end_time, data)
        self.interval_tree.add(Interval(start_time, end_time, new_task))
        self.task_map[task_id] = new_task
        return True

    def is_conflicting(self, start_time, end_time):
        """Check if adding a new task at this interval would cause overlap"""
        # intervaltree's overlap() treats intervals as half-open [start, end).
        conflicts = self.interval_tree.overlap(start_time, end_time)
        return len(conflicts) > 0

    def query(self, point_or_interval):
        """
        Finds tasks active at a given point in time or overlapping
        with a given interval.
        :param point_or_interval: A single timestamp (int/float) or a tuple (start, end)
        :return: A list of Task objects
        :raises ValueError: if the argument is neither a number nor a tuple
        """
        if isinstance(point_or_interval, (int, float)):
            intervals = self.interval_tree.at(point_or_interval)
        elif isinstance(point_or_interval, tuple):
            start, end = point_or_interval
            intervals = self.interval_tree.overlap(start, end)
        else:
            raise ValueError("query must be called with a time or an interval")

        # Each tree entry stores its Task in .data (set by schedule()).
        return [interval.data for interval in intervals]

    def remove(self, task_id):
        """Removes a scheduled task by its ID.  Returns False if unknown."""
        if task_id not in self.task_map:
            return False
        task = self.task_map[task_id]
        self.interval_tree.remove(Interval(task.start, task.end, task))
        del self.task_map[task_id]
        return True

    def update(self, task_id, new_start_time, new_end_time):
        """Updates the time interval of a scheduled task.

        Implemented as remove-then-reschedule so the task's own old slot
        cannot conflict with its new one; on conflict the original slot is
        restored and False is returned.
        """
        if task_id not in self.task_map:
            return False

        task_to_update = self.task_map[task_id]
        # check if the new interval creates conflicts.
        original_start, original_end = task_to_update.start, task_to_update.end
        self.remove(task_id)

        if not self.schedule(task_id, new_start_time, new_end_time, task_to_update.data):
            # reschedule the original task since there was a conflict.
            self.schedule(task_id, original_start, original_end, task_to_update.data)
            return False  # Can't be updated due to conflicts
        return True

    def get_all_tasks(self):
        """Gets all tasks in the scheduler."""
        return list(self.task_map.values())


def main():
    # NOTE(review): the scheduler lives only for this process — every CLI
    # invocation starts from an empty tree, so remove/query/update can never
    # see tasks scheduled by a previous run.  Confirm whether persistence
    # was intended.
    scheduler = IntervalScheduler()

    parser = argparse.ArgumentParser(description="Interval Scheduler CLI")
    subparsers = parser.add_subparsers(title="Commands", dest="command", help="Available commands")

    # Schedule command
    schedule_parser = subparsers.add_parser("schedule", help="Schedule a task")
    schedule_parser.add_argument("task_id", type=int, help="Task ID")
    schedule_parser.add_argument("start_time", type=int, help="Start time")
    schedule_parser.add_argument("end_time", type=int, help="End time")
    schedule_parser.add_argument("--data", type=str, help="Optional task data", default=None)

    # Query command
    query_parser = subparsers.add_parser("query", help="Query for active tasks")
    query_parser.add_argument("query_type", choices=["point", "interval"], help="Query type: at a point in time, or an interval")
    query_parser.add_argument("param1", type=int, help="Start of the time or time at which we need to query")
    query_parser.add_argument("param2", type=int, nargs='?', default=None, help="End of time interval, only required for the interval query type")

    # Remove command
    remove_parser = subparsers.add_parser("remove", help="Remove a scheduled task")
    remove_parser.add_argument("task_id", type=int, help="Task ID to remove")

    # Update command
    update_parser = subparsers.add_parser("update", help="Update the time of a task")
    update_parser.add_argument("task_id", type=int, help="Task ID to update")
    update_parser.add_argument("new_start", type=int, help="New start time")
    update_parser.add_argument("new_end", type=int, help="New end time")

    # Get all command
    get_all_parser = subparsers.add_parser("get_all", help="Get all tasks")

    # Help command
    help_parser = subparsers.add_parser("help", help="Display help for a specific command")
    help_parser.add_argument("help_command", nargs="?", help="Command to get help for")

    args = parser.parse_args()

    if args.command == "help":
        if args.help_command:
            print(get_command_help(args.help_command, parser))
        else:
            parser.print_help()  # Overall help if no command is specified

    elif args.command == "schedule":
        if scheduler.schedule(args.task_id, args.start_time, args.end_time, args.data):
            print(f"Scheduled task {args.task_id} from {args.start_time} to {args.end_time}")
        else:
            print(f"Could not schedule task {args.task_id}, conflict found")
    elif args.command == "query":
        if args.query_type == "point":
            result = scheduler.query(args.param1)
        elif args.query_type == "interval":
            result = scheduler.query((args.param1, args.param2))
        print("Query result:")
        if result:
            for task_data in result:
                print(f" - {task_data}")
        else:
            print(" No tasks found.")
    elif args.command == "remove":
        if scheduler.remove(args.task_id):
            print(f"Removed task {args.task_id}")
        else:
            print(f"Task {args.task_id} not found")
    elif args.command == "update":
        if scheduler.update(args.task_id, args.new_start, args.new_end):
            print(f"Updated task {args.task_id} to run from {args.new_start} to {args.new_end}")
        else:
            print(f"Could not update task {args.task_id}, possible conflict found")
    elif args.command == "get_all":
        tasks = scheduler.get_all_tasks()
        if tasks:
            print("All tasks:")
            for task in tasks:
                print(f" - {task}")
        else:
            print("No task scheduled yet")

    elif args.command is None:
        parser.print_help()


def get_command_help(command, parser):
    """Generates help for a specific command"""
    # Walk argparse internals to find the sub-parser matching `command`.
    for action in parser._actions:
        if isinstance(action, argparse._SubParsersAction):
            for choice, subparser in action.choices.items():
                if choice == command:
                    return subparser.format_help()
    return "Command not found, try `python script.py --help`"


if __name__ == "__main__":
    main()


from graphviz import Digraph
import os
import uuid
from datetime import datetime


def floats_are_equal(a, b, eps=1e-3):
    # Returns True if a and b are within eps of each other
    return abs(a - b) < eps


class Node:
    def __init__(self, data):
        self.data = data
        self.color = 'RED'  # default color for a freshly inserted node
        self.left = None
        self.right = None
        self.parent = None
        self.size = 1  # subtree-size augmentation (order statistics)


class RedBlackTree:

    def __init__(self):
        # Single shared sentinel stands in for every absent child.
        self.NIL_LEAF = Node(None)  # Sentinel NIL leaf node
        self.NIL_LEAF.color = 'BLACK'
        self.NIL_LEAF.size = 0  # size of nil node is 0
        self.root = self.NIL_LEAF

    def _update_size(self, node):
        """Updates the size of the node from its children."""
        if node == self.NIL_LEAF:
            return
        node.size = node.left.size + node.right.size + 1

    def _transplant(self, u, v):
        """Replace the subtree rooted at u with the one rooted at v."""
        if u.parent is None:
            self.root = v
        elif u == u.parent.left:
            u.parent.left = v
        else:
            u.parent.right = v
        v.parent =
u.parent
        # ^ completes `v.parent = u.parent` begun in the previous chunk.

        # Update the sizes of the ancestors after transplant.
        current = v.parent
        while current:
            self._update_size(current)
            current = current.parent

    def _minimum(self, node):
        """Return the minimum-key node of the subtree rooted at `node`."""
        while node.left != self.NIL_LEAF:
            node = node.left
        return node

    def _left_rotate(self, x):
        """Left-rotate around x (CLRS LEFT-ROTATE), then refresh subtree sizes."""
        y = x.right
        x.right = y.left
        if y.left != self.NIL_LEAF:
            y.left.parent = x
        y.parent = x.parent
        if x.parent is None:
            self.root = y
        elif x == x.parent.left:
            x.parent.left = y
        else:
            x.parent.right = y
        y.left = x
        x.parent = y

        # update sizes after rotation — x first, since x is now y's child
        self._update_size(x)
        self._update_size(y)

    def _right_rotate(self, y):
        """Mirror image of _left_rotate."""
        x = y.left
        y.left = x.right
        if x.right != self.NIL_LEAF:
            x.right.parent = y
        x.parent = y.parent
        if y.parent is None:
            self.root = x
        elif y == y.parent.left:
            y.parent.left = x
        else:
            y.parent.right = x
        x.right = y
        y.parent = x

        # update sizes after rotation — y first, since y is now x's child
        self._update_size(y)
        self._update_size(x)

    def _fix_insert(self, k):
        """Restore red-black properties after inserting red node k (RB-INSERT-FIXUP)."""
        while k.parent and k.parent.color == 'RED':
            if k.parent == k.parent.parent.left:
                u = k.parent.parent.right  # uncle
                if u.color == 'RED':
                    # Case 1: red uncle — recolor, push violation two levels up.
                    k.parent.color = 'BLACK'
                    u.color = 'BLACK'
                    k.parent.parent.color = 'RED'
                    k = k.parent.parent
                else:
                    if k == k.parent.right:
                        # Case 2: rotate into case 3.
                        k = k.parent
                        self._left_rotate(k)
                    # Case 3: recolor and rotate the grandparent.
                    k.parent.color = 'BLACK'
                    k.parent.parent.color = 'RED'
                    self._right_rotate(k.parent.parent)
            else:
                # Symmetric: parent is a right child.
                u = k.parent.parent.left
                if u.color == 'RED':
                    k.parent.color = 'BLACK'
                    u.color = 'BLACK'
                    k.parent.parent.color = 'RED'
                    k = k.parent.parent
                else:
                    if k == k.parent.left:
                        k = k.parent
                        self._right_rotate(k)
                    k.parent.color = 'BLACK'
                    k.parent.parent.color = 'RED'
                    self._left_rotate(k.parent.parent)
        self.root.color = 'BLACK'

    def _update_ancestor_size_on_insert(self, node):
        """Updates the size of the node's ancestors after insertion."""
        current = node.parent
        while current:
            self._update_size(current)
            current = current.parent

    def insert(self, data):
        """Insert `data` as a red node, rebalance, then refresh ancestor sizes."""
        node = Node(data)
        node.left = self.NIL_LEAF
        node.right = self.NIL_LEAF
        if self.root == self.NIL_LEAF:
            self.root = node
            node.color = 'BLACK'
        else:
            # Ordinary BST descent; equal keys go right.
            current = self.root
            while current != self.NIL_LEAF:
                parent = current
                if node.data < current.data:
                    current = current.left
                else:
                    current = current.right
            node.parent = parent
            if node.data < parent.data:
                parent.left = node
            else:
                parent.right = node
            self._fix_insert(node)

        self._update_ancestor_size_on_insert(node)

    def _fix_delete(self, x):
        """Restore red-black properties after deletion (RB-DELETE-FIXUP)."""
        while x != self.root and x.color == 'BLACK':
            if x == x.parent.left:
                s = x.parent.right  # sibling
                if s.color == 'RED':
                    # Case 1: red sibling — rotate to get a black sibling.
                    s.color = 'BLACK'
                    x.parent.color = 'RED'
                    self._left_rotate(x.parent)
                    s = x.parent.right
                if s.left.color == 'BLACK' and s.right.color == 'BLACK':
                    # Case 2: both nephews black — recolor, move up.
                    s.color = 'RED'
                    x = x.parent
                else:
                    if s.right.color == 'BLACK':
                        # Case 3: rotate into case 4.
                        s.left.color = 'BLACK'
                        s.color = 'RED'
                        self._right_rotate(s)
                        s = x.parent.right
                    # Case 4: terminal recolor + rotation.
                    s.color = x.parent.color
                    x.parent.color = 'BLACK'
                    s.right.color = 'BLACK'
                    self._left_rotate(x.parent)
                    x = self.root
            else:
                # Symmetric: x is a right child.
                s = x.parent.left
                if s.color == 'RED':
                    s.color = 'BLACK'
                    x.parent.color = 'RED'
                    self._right_rotate(x.parent)
                    s = x.parent.left
                if s.left.color == 'BLACK' and s.right.color == 'BLACK':
                    s.color = 'RED'
                    x = x.parent
                else:
                    if s.left.color == 'BLACK':
                        s.right.color = 'BLACK'
                        s.color = 'RED'
                        self._left_rotate(s)
                        s = x.parent.left
                    s.color = x.parent.color
                    x.parent.color = 'BLACK'
                    s.left.color = 'BLACK'
                    self._right_rotate(x.parent)
                    x = self.root
        x.color = 'BLACK'

    def delete(self, data):
        """Delete one node whose key is eps-equal to `data` (CLRS RB-DELETE).

        Uses floats_are_equal for the match; prints and returns if not found.
        Ancestor sizes are refreshed inside _transplant.
        """
        node = self.root
        z = self.NIL_LEAF
        while node != self.NIL_LEAF:
            if floats_are_equal(node.data, data):
                z = node
            # Keep descending even after a match (duplicates go right on insert).
            if node.data <= data:
                node = node.right
            else:
                node = node.left
        if z == self.NIL_LEAF:
            print("Couldn't find key in the tree")
            return
        y = z
        y_original_color = y.color
        if z.left == self.NIL_LEAF:
            x = z.right
            self._transplant(z, z.right)
        elif z.right == self.NIL_LEAF:
            x = z.left
            self._transplant(z, z.left)
        else:
            # Two children: splice in z's in-order successor y.
            y = self._minimum(z.right)
            y_original_color = y.color
            x = y.right
            if y.parent == z:
                x.parent = y
            else:
                self._transplant(y, y.right)
                y.right = z.right
                y.right.parent = y
            self._transplant(z, y)
            y.left = z.left
            y.left.parent = y
            y.color = z.color
        if y_original_color == 'BLACK':
            self._fix_delete(x)

    def search(self, data):
        """Return the node whose key is eps-equal to `data`, or None."""
        current = self.root
        while current != self.NIL_LEAF and not floats_are_equal(current.data, data):
            if data < current.data:
                current = current.left
            else:
                current = current.right
        return current if current != self.NIL_LEAF else None


class VisualRedBlackTree(RedBlackTree):
    """RedBlackTree that renders a PNG snapshot (via graphviz) after each operation."""

    def __init__(self):
        super().__init__()
        self.dot = Digraph(comment="Red-Black Tree")
        self.output_folder = self._create_output_folder()

    def _create_output_folder(self):
        """Create and return a timestamped output folder next to this file."""
        current_dir = os.path.dirname(os.path.abspath(__file__))
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        folder_name = f"Red_Black_{timestamp}"
        output_path = os.path.join(current_dir, folder_name)
        os.makedirs(output_path, exist_ok=True)
        # ^ completes _create_output_folder() begun in the previous chunk.
        return output_path

    def visualize_tree(self):
        """Render the current tree to a uniquely named PNG in the output folder."""
        self.dot = Digraph(comment="Red-Black Tree")  # Reset graph
        self._add_nodes_edges(self.root)
        file_name = f"rb_tree_{uuid.uuid4()}.png"
        file_path = os.path.join(self.output_folder, file_name)
        self.dot.render(file_path, format="png", cleanup=True)
        print(f"Tree visualization saved as {file_path}")

    def _add_nodes_edges(self, node):
        """Recursively add `node` and its children to the graphviz graph.

        Node labels show "data (size)"; node/font color mirrors the RB color.
        Uses id(node) as the graphviz node key so duplicates stay distinct.
        """
        if node == self.NIL_LEAF:
            return

        color = "black" if node.color == "BLACK" else "red"
        self.dot.node(str(id(node)), label=f"{node.data} ({node.size})", color=color, fontcolor=color)

        if node.left != self.NIL_LEAF:
            self.dot.edge(str(id(node)), str(id(node.left)), label="L")
            self._add_nodes_edges(node.left)

        if node.right != self.NIL_LEAF:
            self.dot.edge(str(id(node)), str(id(node.right)), label="R")
            self._add_nodes_edges(node.right)

    def insert(self, data):
        """Insert, then re-render the tree."""
        print(f"Inserting {data}")
        super().insert(data)
        self.visualize_tree()

    def delete(self, data):
        """Delete, then re-render the tree."""
        print(f"deleting {data}")
        super().delete(data)
        self.visualize_tree()


if __name__ == "__main__":
    # Tiny interactive REPL: `insert <number>`, `delete <number>`, `exit`.
    rbt = VisualRedBlackTree()

    while True:
        command_str = input("Enter command (insert , delete , or exit): ").strip().lower()
        parts = command_str.split()

        if not parts:
            continue  # Handle empty input

        command = parts[0]

        if command == "insert":
            if len(parts) == 2:
                try:
                    num = float(parts[1])
                    rbt.insert(num)
                except ValueError:
                    print("Invalid number format. Please enter a valid integer or int.")
            else:
                print("Invalid insert command. Usage: insert ")
        elif command == "delete":
            if len(parts) == 2:
                try:
                    num = float(parts[1])
                    rbt.delete(num)
                except ValueError:
                    print("Invalid number format. Please enter a valid integer or int.")
            else:
                print("Invalid delete command. Usage: delete ")
        elif command == "exit":
            print("Exiting program.")
            break
        else:
            print("Invalid command. Please use 'insert', 'delete', or 'exit'.")
# The block below is a triple-quoted string used as a disabled debugging script.
"""

# debugging
# Example Usage
if __name__ == "__main__":
    rbt = VisualRedBlackTree()

    rbt.insert(8453)
    rbt.insert(4553)
    rbt.insert(453)
    rbt.insert(453)
    rbt.insert(843)
    rbt.insert(-12)
    rbt.insert(1)
    rbt.insert(-1)
    rbt.insert(-20)
    rbt.insert(25)
    rbt.insert(5)
    rbt.insert(15)  # Automatically visualizes the updated tree
    rbt.delete(-12)  # Automatically visualizes the updated tree
    rbt.delete(453)
    rbt.delete(453)
    rbt.delete(4553)
    rbt.delete(8453)
    rbt.insert(4)
    rbt.insert(-4)
    rbt.insert(6)
    rbt.insert(-8)
    rbt.insert(-13)
    rbt.insert(25)
    rbt.insert(5)
    rbt.delete(5)
    rbt.delete(-13)
"""
-------------------------------------------------------------------------------- /Convolution/Naive/Naive_Convolution.cu: --------------------------------------------------------------------------------
// for google colab
// %%writefile naive_convolution.cu
// !nvcc -arch=sm_75 -o naive_convolution naive_convolution.cu

// NOTE(review): the #include targets were stripped in this dump (likely
// <cstdio>, <iostream>, <algorithm> given fprintf/std::cout/std::fill below)
// — restore them before compiling.
#include
#include
#include


// Abort with file/line context if the most recent CUDA call failed.
#define cudaCheckErrors(msg) \
    do { \
        cudaError_t __err = cudaGetLastError(); \
        if (__err != cudaSuccess) { \
            fprintf(stderr, "Fatal error: %s (%s at %s:%d)\n", \
                msg, cudaGetErrorString(__err), \
                __FILE__, __LINE__); \
            fprintf(stderr, "*** FAILED - ABORTING\n"); \
            exit(1); \
        } \
    } while (0)
// Naive direct 2-D convolution: one thread computes one output pixel of one
// output channel (blockIdx.z selects the output channel).
__global__ void direct_convolution(
    const float* input, const float* kernel, float* output,
    int in_channels, int in_h, int in_w,
    int out_channels, int kernel_size, int stride, int pad
) {
    const int out_h = (in_h + 2 * pad - kernel_size) / stride + 1;
    const int out_w = (in_w + 2 * pad - kernel_size) / stride + 1;

    // Output coordinates (x, y) and output channel
    const int idx = blockIdx.x * blockDim.x + threadIdx.x;
    const int idy = blockIdx.y * blockDim.y + threadIdx.y;
    const int c_out = blockIdx.z;

    if (idx >= out_w || idy >= out_h || c_out >= out_channels) return;

    float sum = 0.0f;

    // Iterate over input channels
    for (int c_in = 0; c_in < in_channels; c_in++) {
        // Iterate over kernel elements
        for (int ky = 0; ky < kernel_size; ky++) {
            for (int kx = 0; kx < kernel_size; kx++) {
                // Input coordinates (adjusted for padding)
                const int in_y = idy * stride + ky - pad;
                const int in_x = idx * stride + kx - pad;

                // Zero-padding is implicit: out-of-bounds taps are skipped.
                if (in_y >= 0 && in_y < in_h && in_x >= 0 && in_x < in_w) {
                    const float input_val = input[(c_in * in_h + in_y) * in_w + in_x];
                    const float kernel_val = kernel[((c_out * in_channels + c_in) * kernel_size + ky) * kernel_size + kx];
                    sum += input_val * kernel_val;
                }
            }
        }
    }

    // Write output (add batch dimension)
    // NOTE(review): indexing assumes batch == 1; no batch offset is applied.
    output[(c_out * out_h + idy) * out_w + idx] = sum;
}

int main() {
    // Configuration (example: 1024x1024 input, 3x3 kernel)
    const int batch = 1;
    const int in_channels = 3;
    const int in_h = 1024, in_w = 1024;
    const int out_channels = 64;
    const int kernel_size = 3;
    const int stride = 1;
    const int pad = 1;

    const int out_h = (in_h + 2 * pad - kernel_size) / stride + 1;
    const int out_w = (in_w + 2 * pad - kernel_size) / stride + 1;

    // Host buffers, filled with ones so the expected output is easy to verify.
    float *h_input = new float[batch * in_channels * in_h * in_w];
    float *h_kernel = new float[out_channels * in_channels * kernel_size * kernel_size];
    float *h_output = new float[batch * out_channels * out_h * out_w];

    std::fill(h_input, h_input + batch * in_channels * in_h * in_w, 1.0f);
    std::fill(h_kernel, h_kernel + out_channels * in_channels * kernel_size * kernel_size, 1.0f);

    float *d_input, *d_kernel, *d_output;
    cudaMalloc(&d_input, batch * in_channels * in_h * in_w * sizeof(float));
    cudaCheckErrors("cudaMalloc d_input failed");
    cudaMalloc(&d_kernel, out_channels * in_channels * kernel_size * kernel_size * sizeof(float));
    cudaCheckErrors("cudaMalloc d_kernel failed");
    cudaMalloc(&d_output, batch * out_channels * out_h * out_w * sizeof(float));
    cudaCheckErrors("cudaMalloc d_output failed");

    cudaMemcpy(d_input, h_input, batch * in_channels * in_h * in_w * sizeof(float), cudaMemcpyHostToDevice);
    cudaCheckErrors("cudaMemcpy H2D d_input failed");
    cudaMemcpy(d_kernel, h_kernel, out_channels * in_channels * kernel_size * kernel_size * sizeof(float), cudaMemcpyHostToDevice);
    cudaCheckErrors("cudaMemcpy H2D d_kernel failed");

    // 16x16 threads per block over the output plane; z dimension = out channel.
    dim3 block(16, 16);
    dim3 grid(
        (out_w + block.x - 1) / block.x,
        (out_h + block.y - 1) / block.y,
        out_channels
    );

    cudaEvent_t start, stop;
    cudaEventCreate(&start);
    cudaEventCreate(&stop);
    cudaCheckErrors("cudaEventCreate failed");

    // Warm-up launch so the timed runs do not include one-time startup cost.
    direct_convolution<<<grid, block>>>(d_input, d_kernel, d_output, in_channels, in_h, in_w, out_channels, kernel_size, stride, pad);
    cudaCheckErrors("Warm-up kernel failed");

    cudaEventRecord(start);
    cudaCheckErrors("cudaEventRecord start failed");
    for (int i = 0; i < 10; ++i) {
        // Running multiple times for stable measurement
        direct_convolution<<<grid, block>>>(d_input, d_kernel, d_output, in_channels, in_h, in_w, out_channels, kernel_size, stride, pad);
        cudaCheckErrors("Main convolution kernel failed");
    }
    cudaEventRecord(stop);
    cudaCheckErrors("cudaEventRecord stop failed");
    cudaEventSynchronize(stop);
    cudaCheckErrors("cudaEventSynchronize failed");

    float milliseconds = 0;
    cudaEventElapsedTime(&milliseconds, start, stop);
    cudaCheckErrors("cudaEventElapsedTime failed");
    std::cout << "Time per convolution: " << milliseconds / 10 << " ms\n";

    cudaMemcpy(h_output, d_output, batch * out_channels * out_h * out_w * sizeof(float), cudaMemcpyDeviceToHost);
    cudaCheckErrors("cudaMemcpy D2H failed");

    // h_output[0] is the (0,0) corner: with pad=1 and an all-ones 3x3 kernel
    // over an all-ones input, only the 2x2 in-bounds taps contribute,
    // hence in_channels * 4.
    const float expected_value = in_channels * 4;
    std::cout << "First output value: " << h_output[0] << " (Expected: " << expected_value << ")\n";

    // Cleanup
    delete[] h_input;
    delete[] h_kernel;
    delete[] h_output;
    cudaFree(d_input);
    cudaFree(d_kernel);
    cudaFree(d_output);
    cudaEventDestroy(start);
    cudaEventDestroy(stop);

    return 0;
}
-------------------------------------------------------------------------------- /Graph_algorithms/AllPairsShortestPaths/FloydWarshall.py: --------------------------------------------------------------------------------
import sys

def floyd_warsall(graph):
    """
    Floyd-Warshall Algorithm to find all-pairs shortest paths in a weighted graph.

    Parameters:
        graph (list of list of int): Adjacency matrix representation of the graph.
            graph[i][j] represents the weight of the edge from vertex i to vertex j.
            If there is no edge, graph[i][j] should be set to infinity (sys.maxsize).

    Returns:
        dist (list of list of int): A 2D list where dist[i][j] is the shortest distance from vertex i to vertex j.
        next_vertex (list of list of int): A 2D list used to reconstruct the shortest paths.

    Raises:
        ValueError: if the graph contains a negative-weight cycle.

    Running Time:
        The algorithm runs in Θ(V^3) time, where V is the number of vertices in the graph.
    """
    # |V| = V
    V = len(graph)
    # Work on a copy so the caller's adjacency matrix is not mutated.
    dist = [[graph[i][j] for j in range(V)] for i in range(V)]
    # next_vertex matrix for path reconstruction
    next_vertex = [[j if graph[i][j] != sys.maxsize else None for j in range(V)] for i in range(V)]

    # floydWarshall algorithm
    for k in range(V):  # intermediary vertex
        for i in range(V):  # source vertex
            for j in range(V):  # destination vertex
                # The sys.maxsize guards prevent "infinity + weight" overflow
                # from producing a bogus shorter path.
                if dist[i][k] != sys.maxsize and dist[k][j] != sys.maxsize \
                        and dist[i][j] > dist[i][k] + dist[k][j]:
                    dist[i][j] = dist[i][k] + dist[k][j]
                    next_vertex[i][j] = next_vertex[i][k]

    # A negative diagonal entry means some cycle through i has negative weight.
    for i in range(V):
        if dist[i][i] < 0:
            # NOTE(review): "negetive" is a typo in this user-facing message;
            # left byte-identical here because this edit only touches comments.
            raise ValueError("Graph contains a negetive cycle")

    return dist, next_vertex


def reconstruct_path(next_vertex, start, end):
    """
    Reconstruct the shortest path from start to end using the next_vertex matrix.

    Parameters:
        next_vertex (list of list of int): The next_vertex matrix from the Floyd-Warshall algorithm.
        start (int): The starting vertex.
        end (int): The destination vertex.

    Returns:
        path (list of int): The shortest path from start to end ([] if unreachable).
    """
    if next_vertex[start][end] is None:
        return []  # no path
    path = [start]
    while start != end:
        # Hop to the next vertex on the optimal start -> end route.
        start = next_vertex[start][end]
        path.append(start)
    return path


if __name__ == "__main__":
    # here I have used sys.maxsize for representing infinity
    graph = [
        [0, 3, sys.maxsize, 7],
        [8, 0, 2, sys.maxsize],
        [5, sys.maxsize, 0, 1],
        [2, sys.maxsize, sys.maxsize, 0]
    ]
    dist, next_vertex = floyd_warsall(graph)

    print("Shortest distances between all pairs of vertices:")
    for row in dist:
        print(row)

    # reconstruct and print the shortest path from vertex 0 to vertex 3
    # you can change this
    start, end = 0, 3
    path = reconstruct_path(next_vertex, start, end)
    print(f"\nShortest path from {start} to {end}: {path}")
-------------------------------------------------------------------------------- /Graph_algorithms/AllPairsShortestPaths/Johnson.py: --------------------------------------------------------------------------------
import heapq



def bellman_ford(graph, weights, source):
    """Single-source shortest paths tolerating negative edges.

    Returns (distance_dict, True) on success, or (None, None) if a
    negative-weight cycle is reachable.
    """
    distance = {v: float('inf') for v in graph}
    distance[source] = 0

    # Relax every edge |V| - 1 times.
    for _ in range(len(graph) - 1):
        for u in graph:
            for v in graph[u]:
                if distance[u] + weights[(u, v)] < distance[v]:
                    distance[v] = distance[u] + weights[(u, v)]

    # One more pass: any further improvement implies a negative cycle.
    for u in graph:
        for v in graph[u]:
            if distance[u] + weights[(u, v)] < distance[v]:
                return None, None  # Negative-weight cycle detected

    return distance, True

def dijkstra(graph, weights, source):
    """Single-source shortest paths for non-negative weights (binary heap).

    Stale heap entries are skipped via the `d > distance[u]` check.
    """
    pq = [(0, source)]
    distance = {v: float('inf') for v in graph}
    distance[source] = 0

    while pq:
        d, u = heapq.heappop(pq)
        if d > distance[u]:
            continue

        for v in graph[u]:
            alt = distance[u] + weights[(u, v)]
            if alt < distance[v]:
                distance[v] = alt
                heapq.heappush(pq, (alt, v))

    return distance
| 40 | def johnson(graph, weights): 41 | """ 42 | Johnson's Algorithm for All-Pairs Shortest Paths 43 | 44 | This algorithm computes the shortest paths between all pairs of vertices in a weighted, directed graph. 45 | It efficiently handles graphs with negative weights but not negative-weight cycles. 46 | 47 | Algorithm Steps: 48 | 1. Augment the graph by adding a new vertex connected to all other vertices with zero-weight edges. 49 | 2. Run Bellman-Ford from the new vertex to compute potential values. 50 | 3. Reweight the original edges to ensure all weights are non-negative. 51 | 4. Run Dijkstra's algorithm from each vertex to compute shortest paths. 52 | 5. Adjust the results to obtain correct distances in the original graph. 53 | 54 | Time Complexity: 55 | - Bellman-Ford: O(VE) 56 | - Dijkstra (using a priority queue): O((V + E) log V) 57 | - Overall: O(VE + V log V) in the worst case 58 | 59 | Space Complexity: 60 | - O(V^2) for storing the distance matrix 61 | """ 62 | new_graph = {v: set(neighbors) for v, neighbors in graph.items()} 63 | new_graph['s'] = set(graph.keys()) 64 | new_weights = weights.copy() 65 | for v in graph: 66 | new_weights[('s', v)] = 0 67 | 68 | h, valid = bellman_ford(new_graph, new_weights, 's') 69 | if not valid: 70 | print("The input graph contains a negative-weight cycle") 71 | return None 72 | 73 | reweighted_weights = {} 74 | for (u, v), w in weights.items(): 75 | reweighted_weights[(u, v)] = w + h[u] - h[v] 76 | 77 | D = {} 78 | for u in graph: 79 | shortest_paths = dijkstra(graph, reweighted_weights, u) 80 | D[u] = {v: shortest_paths[v] + h[v] - h[u] for v in graph} 81 | 82 | return D 83 | 84 | 85 | if __name__ == '__main__': 86 | graph = { 87 | 'A': {'B', 'C'}, 88 | 'B': {'C', 'D'}, 89 | 'C': {'D'}, 90 | 'D': set() 91 | } 92 | weights = { 93 | ('A', 'B'): 1, 94 | ('A', 'C'): 4, 95 | ('B', 'C'): 2, 96 | ('B', 'D'): 5, 97 | ('C', 'D'): 1 98 | } 99 | 100 | shortest_paths = johnson(graph, weights) 101 | print(shortest_paths) 102 | 
-------------------------------------------------------------------------------- /Graph_algorithms/AllPairsShortestPaths/ReadME.md: -------------------------------------------------------------------------------- 1 | ### **1. Problem Definition** 2 | The **all-pairs shortest-paths problem** involves finding the shortest paths between every pair of vertices in a weighted, directed graph. Here’s a deeper look: 3 | 4 | #### **Key Points**: 5 | - **Input**: A weighted, directed graph \( G = (V, E) \) with a weight function \( w : E \rightarrow \mathbb{R} \). The graph may contain negative-weight edges but no negative-weight cycles. 6 | - **Output**: A matrix \( D = (d_{ij}) \), where \( d_{ij} \) represents the shortest-path weight from vertex \( i \) to vertex \( j \). If no path exists, \( d_{ij} = \infty \). 7 | - **Applications**: 8 | - **Network Diameter**: The longest shortest path in a network, which is useful for determining the worst-case communication delay in a network. 9 | - **Distance Tables**: Computing distances between all pairs of cities or locations, as in a road atlas. 10 | - **Transitive Closure**: Determining reachability between all pairs of vertices in a graph. 11 | 12 | #### **Example**: 13 | - If the graph represents a road network, the shortest-path weight \( d_{ij} \) could represent the shortest driving distance from city \( i \) to city \( j \). 14 | 15 | --- 16 | 17 | ### **2. Approaches to Solve the Problem** 18 | The chapter discusses several approaches to solve the all-pairs shortest-paths problem: 19 | 20 | #### **a. Repeated Single-Source Algorithms** 21 | - **Idea**: Run a single-source shortest-path algorithm (like Dijkstra's or Bellman-Ford) \(|V|\) times, once for each vertex as the source. 22 | - **Dijkstra's Algorithm**: 23 | - Works for graphs with **nonnegative edge weights**. 24 | - Running time: 25 | - \( O(V^3) \) with a linear array for the min-priority queue. 26 | - \( O(V^2 \lg V + VE) \) with a Fibonacci heap. 
27 | - Best for sparse graphs (where \( |E| \) is much smaller than \( |V|^2 \)). 28 | - **Bellman-Ford Algorithm**: 29 | - Works for graphs with **negative edge weights** (but no negative-weight cycles). 30 | - Running time: \( O(V^2E) \), which is \( O(V^4) \) for dense graphs. 31 | - Slower than Dijkstra's but handles negative weights. 32 | 33 | #### **b. Limitations**: 34 | - Repeatedly running single-source algorithms is inefficient for large graphs, especially dense ones. 35 | - The chapter introduces more efficient algorithms (e.g., Floyd-Warshall, Johnson's) to handle the all-pairs problem directly. 36 | 37 | --- 38 | 39 | ### **3. Graph Representation** 40 | The graph is represented using an **adjacency matrix**, which is a common choice for all-pairs shortest-paths algorithms. 41 | 42 | #### **Adjacency Matrix \( W = (w_{ij}) \)**: 43 | - \( w_{ij} \) represents the weight of the edge from vertex \( i \) to vertex \( j \). 44 | - The matrix is defined as: 45 | \[ 46 | w_{ij} = 47 | \begin{cases} 48 | 0 & \text{if } i = j, \\ 49 | \text{weight of edge } (i, j) & \text{if } i \neq j \text{ and } (i, j) \in E, \\ 50 | \infty & \text{if } i \neq j \text{ and } (i, j) \notin E. 51 | \end{cases} 52 | \] 53 | - **Example**: 54 | - If there is no edge from \( i \) to \( j \), \( w_{ij} = \infty \). 55 | - If \( i = j \), \( w_{ij} = 0 \) (the distance from a vertex to itself is zero). 56 | 57 | #### **Why Use an Adjacency Matrix?** 58 | - **Efficiency**: Matrix operations are well-suited for dynamic programming algorithms like Floyd-Warshall and matrix multiplication-based approaches. 59 | - **Simplicity**: The matrix representation makes it easy to access and update edge weights. 60 | 61 | --- 62 | 63 | ### **4. Predecessor Matrix** 64 | In addition to the shortest-path weights, the solution includes a **predecessor matrix** \( \Pi = (\pi_{ij}) \), which helps reconstruct the shortest paths. 
65 | 66 | #### **Definition**: 67 | - \( \pi_{ij} \) is the predecessor of vertex \( j \) on the shortest path from vertex \( i \). 68 | - If \( i = j \) or there is no path from \( i \) to \( j \), \( \pi_{ij} = \text{NIL} \). 69 | 70 | #### **Predecessor Subgraph**: 71 | - For each vertex \( i \), the predecessor subgraph \( G_{\pi,i} = (V_{\pi,i}, E_{\pi,i}) \) is a shortest-paths tree rooted at \( i \). 72 | - **Vertices**: \( V_{\pi,i} = \{ j \in V : \pi_{ij} \neq \text{NIL} \} \cup \{ i \} \). 73 | - **Edges**: \( E_{\pi,i} = \{ (\pi_{ij}, j) : j \in V_{\pi,i} - \{ i \} \} \). 74 | 75 | #### **Reconstructing Shortest Paths**: 76 | - The `PRINT-ALL-PAIRS-SHORTEST-PATH` procedure uses the predecessor matrix to print the shortest path from \( i \) to \( j \): 77 | ```python 78 | PRINT-ALL-PAIRS-SHORTEST-PATH(Π, i, j): 79 | if i == j: 80 | print i 81 | elif π_ij == NIL: 82 | print "no path from" i "to" j "exists" 83 | else: 84 | PRINT-ALL-PAIRS-SHORTEST-PATH(Π, i, π_ij) 85 | print j 86 | ``` 87 | 88 | #### **Example**: 89 | - If \( \pi_{ij} = k \), then the shortest path from \( i \) to \( j \) goes through \( k \). The procedure recursively prints the path from \( i \) to \( k \) and then prints \( j \). 90 | 91 | --- 92 | 93 | ### **Summary of Parts 1-4** 94 | 1. **Problem Definition**: The all-pairs shortest-paths problem involves finding the shortest paths between all pairs of vertices in a graph. It has applications in network analysis, distance computation, and transitive closure. 95 | 2. **Approaches**: 96 | - Repeated single-source algorithms (Dijkstra's, Bellman-Ford) are straightforward but inefficient for large graphs. 97 | - More efficient algorithms (e.g., Floyd-Warshall, Johnson's) are introduced later in the chapter. 98 | 3. **Graph Representation**: The graph is represented using an adjacency matrix, which is well-suited for dynamic programming algorithms. 99 | 4. 
**Predecessor Matrix**: This matrix helps reconstruct the shortest paths and forms shortest-paths trees for each vertex.
-------------------------------------------------------------------------------- /Graph_algorithms/Elementary-Algorithms/BFS/BFS.py: --------------------------------------------------------------------------------
from collections import deque

def BFS(graph, start):
    """
    Breadth-First Search (BFS) Traversal.

    BFS explores the vertices of a graph level by level: it starts at `start`,
    visits all of its neighbors, then their neighbors, and so on. Useful for
    shortest paths in unweighted graphs and for exploring connected components.

    Algorithm:
        1. Mark the source vertex visited and enqueue it.
        2. While the queue is not empty, dequeue a vertex and enqueue each of
           its unvisited neighbors (marking them visited as they are queued).

    Parameters:
        graph (dict): Adjacency list; keys are vertices, values are lists of
            adjacent vertices.
        start: The starting vertex for the traversal.

    Returns:
        list: Vertices in the order they were visited.

    Time Complexity:
        O(V + E), where V is the number of vertices and E the number of edges.

    Space Complexity:
        O(V), for the queue and visited set.
    """
    # Initialize a queue for BFS; marking on enqueue prevents duplicates.
    queue = deque([start])
    visited = set([start])
    traversal_order = []

    while queue:
        # Dequeue a vertex from the queue
        vertex = queue.popleft()
        traversal_order.append(vertex)

        # Explore all neighbors of the current vertex
        for neighbor in graph[vertex]:
            if neighbor not in visited:
                visited.add(neighbor)
                queue.append(neighbor)

    return traversal_order
# # for using bfs
# graph = {
#     0: [1, 2],
#     1: [0, 3, 4],
#     2: [0, 5],
#     3: [1],
#     4: [1],
#     5: [2]
# }
# bfs_traversal = BFS(graph, 0)
# print(bfs_traversal)

-------------------------------------------------------------------------------- /Graph_algorithms/Elementary-Algorithms/BFS/Bipartite.py: --------------------------------------------------------------------------------
# problem 20.2-7 - CLRS book

"""
Problem:

There are two types of professional wrestlers: "faces" (short for
"babyfaces," i.e., "good guys") and "heels" ("bad guys"). Between any
pair of professional wrestlers, there may or may not be a rivalry. You
are given the names of n professional wrestlers and a list of r pairs of
wrestlers for which there are rivalries. Give an O(n + r)-time algorithm
that determines whether it is possible to designate some of the wrestlers
as faces and the remainder as heels such that each rivalry is between a
face and a heel. If it is possible to perform such a designation, your
algorithm should produce it.
"""
from collections import deque

def is_bipartite(graph, n):
    """
    Determine if the graph is bipartite and return the designation of wrestlers.

    A bipartite graph is a graph whose vertices can be divided into two
    disjoint sets such that no two vertices within the same set are adjacent.
# Graph_algorithms/Elementary-Algorithms/BFS/Bipartite.py
# problem 20.2-7 - CLRS book
#
# Problem: there are two types of professional wrestlers, "faces" and
# "heels".  Given n wrestlers and r rivalry pairs, decide in O(n + r) time
# whether the wrestlers can be split so that every rivalry is between a face
# and a heel, and produce such a designation when one exists.  This is
# exactly the question of whether the rivalry graph is bipartite.
from collections import deque


def is_bipartite(graph, n):
    """Two-color the rivalry graph with BFS.

    A graph is bipartite iff its vertices can be split into two disjoint sets
    with no edge inside either set.  Here the sets are the "faces" and the
    "heels" and the edges are rivalries.  Each connected component is explored
    with BFS: its first wrestler is made a "face", all of that wrestler's
    rivals "heels", and so on, alternating by BFS level.  A rivalry between
    two wrestlers already in the same group proves the graph contains an odd
    cycle and is therefore not bipartite.

    Args:
        graph (dict): Adjacency list mapping each wrestler 0..n-1 to the list
            of wrestlers it has a rivalry with.
        n (int): The number of wrestlers (vertices).

    Returns:
        tuple: (possible, designation)
            - possible (bool): True iff a valid face/heel split exists.
            - designation (dict): Maps each wrestler to "face" or "heel";
              empty when no valid split exists.

    Complexity:
        O(n + r) time and space, where r is the number of rivalries.
    """
    designation = {}
    for wrestler in range(n):
        if wrestler not in designation:
            # New connected component: its first member may go in either
            # group, so arbitrarily start it as a "face".
            queue = deque([wrestler])
            designation[wrestler] = "face"

            while queue:
                current = queue.popleft()
                current_group = designation[current]

                for rival in graph[current]:
                    if rival not in designation:
                        designation[rival] = "heel" if current_group == "face" else "face"
                        queue.append(rival)
                    elif designation[rival] == current_group:
                        # Conflict: rival landed in the same group as the
                        # current wrestler -> odd cycle -> not bipartite.
                        return False, {}

    return True, designation


# Example:
n = 4  # Wrestlers: 0, 1, 2, 3
rivalries = [(0, 1), (1, 2), (2, 3), (3, 0)]  # Rivalries
graph = {
    0: [1, 3],
    1: [0, 2],
    2: [1, 3],
    3: [2, 0]
}
# Bug fix: the result variables previously reused the name `is_bipartite`,
# shadowing the function itself so it could never be called again.
possible, designation = is_bipartite(graph, n)
print(possible)
print(designation)
15 | Args: 16 | graph (dict): The tree represented as an adjacency list. 17 | start (int): The starting node for BFS. 18 | Returns: 19 | tuple: (farthest_node, distance) 20 | - farthest_node (int): The farthest node from the starting node. 21 | - distance (int): The distance to the farthest node. 22 | """ 23 | queue = deque([(start, 0)]) #(node, distance) 24 | visited = set([start]) 25 | farthest_node = start 26 | max_distance = 0 27 | while queue: 28 | current, distance = queue.popleft() 29 | if distance > max_distance: 30 | max_distance = distance 31 | farthest_node = current 32 | for neighbor in graph[current]: 33 | if neighbor not in visited: 34 | visited.add(neighbor) 35 | queue.append((neighbor, distance + 1)) 36 | return farthest_node, max_distance 37 | 38 | def tree_diameter(graph): 39 | # Step 1: Find the farthest node from an arbitrary starting node 40 | arbitrary_node = next(iter(graph)) # Pick any node in the graph 41 | farthest_node, _ = BFS(graph, arbitrary_node) 42 | # Step 2: Find the farthest node from the farthest node found in Step 1 43 | diameter_node, diameter = BFS(graph, farthest_node) 44 | return diameter , diameter_node 45 | 46 | 47 | 48 | graph = { 49 | 0: [1, 2], 50 | 1: [0, 3, 4], 51 | 2: [0, 5], 52 | 3: [1], 53 | 4: [1], 54 | 5: [2] 55 | } 56 | n , m = tree_diameter(graph) 57 | print(n) 58 | print(m) -------------------------------------------------------------------------------- /Graph_algorithms/Elementary-Algorithms/DFS/DFS.py: -------------------------------------------------------------------------------- 1 | """ 2 | Depth-First Search (DFS) Traversal (Recursive Version) 3 | 4 | Description: 5 | DFS is a graph traversal algorithm that explores as far as possible along each branch before 6 | backtracking. It starts at a given source vertex and explores one of its neighbors, then 7 | recursively explores the neighbor's neighbors, and so on. 
# Graph_algorithms/Elementary-Algorithms/DFS/DFS.py
"""
Depth-First Search (DFS) traversal, recursive and iterative.

DFS dives as far as possible along one branch before backtracking: from the
source it visits a neighbour, then that neighbour's neighbour, and so on,
returning only when a dead end is reached.  It underpins cycle detection,
topological sorting and connected-component exploration.

Both implementations take a graph as an adjacency list (dict mapping each
vertex to the list of its adjacent vertices) and a start vertex, and return
the vertices in visiting order.  Each runs in O(V + E) time and O(V) space
(the recursion stack or the explicit stack, plus the visited set).
"""


def DFS_recursive(graph, start):
    """Depth-first traversal using the call stack.

    Args:
        graph (dict): The graph represented as an adjacency list.
        start: The starting vertex for the DFS traversal.

    Returns:
        list: Vertices in the order they were first visited.
    """
    seen = set()
    order = []

    def visit(vertex):
        # Record the vertex, then recurse into each unvisited neighbour.
        seen.add(vertex)
        order.append(vertex)
        for nxt in graph[vertex]:
            if nxt not in seen:
                visit(nxt)

    visit(start)
    return order


def DFS_iterative(graph, start):
    """Depth-first traversal using an explicit stack.

    Neighbours are pushed in reverse so the pop order matches the recursive
    version: both functions report vertices in exactly the same order.

    Args:
        graph (dict): The graph represented as an adjacency list.
        start: The starting vertex for the DFS traversal.

    Returns:
        list: Vertices in the order they were first visited.
    """
    order = []
    seen = set()
    stack = [start]

    while stack:
        vertex = stack.pop()
        if vertex in seen:
            # A vertex may be pushed via several parents; process it once.
            continue
        seen.add(vertex)
        order.append(vertex)
        stack.extend(nxt for nxt in reversed(graph[vertex]) if nxt not in seen)

    return order


graph = {
    0: [1, 2],
    1: [0, 3, 4],
    2: [0, 5],
    3: [1],
    4: [1],
    5: [2]
}
dfs_traversal = DFS_recursive(graph, 0)  # or DFS_iterative
print(dfs_traversal)
# Graph_algorithms/Elementary-Algorithms/DFS/SCC.py
from collections import defaultdict
# from CLRS unit 20.5

# A strongly connected component (SCC) of a directed graph G = (V, E) is a
# maximal set of vertices C ⊆ V such that every pair u, v ∈ C is mutually
# reachable: both u ⇝ v and v ⇝ u.


def find_sccs_kosaraju(graph):
    """Return all Strongly Connected Components using Kosaraju's algorithm.

    Steps:
      1. DFS the graph, stacking vertices in order of increasing finishing time.
      2. Build the transpose of the graph (every edge reversed).
      3. DFS the transpose, starting vertices in decreasing finishing time;
         each tree grown this way is exactly one SCC.

    Args:
        graph (dict): Directed graph as an adjacency list -- keys are
            vertices, values are lists of successors.

    Returns:
        List[List]: A list of SCCs, each a list of vertices.
    """
    finish_stack = []
    visited = set()

    def record_finish(node):
        """First pass: push *node* after all of its descendants are explored."""
        visited.add(node)
        for succ in graph[node]:
            if succ not in visited:
                record_finish(succ)
        finish_stack.append(node)

    def collect(node, component):
        """Second pass: grow one SCC inside the transposed graph."""
        visited.add(node)
        component.append(node)
        for pred in transposed[node]:
            if pred not in visited:
                collect(pred, component)

    # Pass 1: compute the finishing-time order.
    for vertex in graph:
        if vertex not in visited:
            record_finish(vertex)

    # Reverse every edge to obtain the transpose.
    transposed = defaultdict(list)
    for vertex in graph:
        for succ in graph[vertex]:
            transposed[succ].append(vertex)

    # Pass 2: peel off SCCs in decreasing finishing time.
    visited.clear()
    sccs = []
    while finish_stack:
        vertex = finish_stack.pop()
        if vertex not in visited:
            component = []
            collect(vertex, component)
            sccs.append(component)

    return sccs


# Example Usage
if __name__ == "__main__":
    # Define a directed graph as an adjacency list
    graph = {
        0: [1],
        1: [2],
        2: [0, 3],
        3: [4],
        4: [],
        5: [6],
        6: [5],
    }

    sccs = find_sccs_kosaraju(graph)
    print("Strongly Connected Components:", sccs)

    # Real-World Application Example
    def real_world_sccs_example():
        """
        A real-world scenario: each node is a social-network user and an edge
        from A to B means user A follows user B.  The SCCs of this graph are
        the groups of users who are all mutually connected.
        """
        social_network = {
            'Alice': ['Bob'],
            'Bob': ['Charlie'],
            'Charlie': ['Alice', 'David'],
            'David': ['Emily'],
            'Emily': [],
            'Frank': ['Grace'],
            'Grace': ['Frank'],
        }

        sccs = find_sccs_kosaraju(social_network)
        print("Strongly Connected Components in the Social Network:", sccs)

    real_world_sccs_example()
28 | in_degree = {u: 0 for u in graph} # Initialize in-degree of all vertices to 0 29 | for u in graph: 30 | for v in graph[u]: 31 | in_degree[v] += 1 # Increment in-degree for each edge (u -> v) 32 | 33 | # Step 2: Initialize a queue with vertices of in-degree zero 34 | # These vertices have no prerequisites and can be processed first. 35 | queue = deque([u for u in graph if in_degree[u] == 0]) 36 | 37 | # Step 3: Perform topological sort 38 | topo_sort = [] # List to store the topological order of vertices 39 | while queue: 40 | u = queue.popleft() # Remove a vertex with in-degree zero 41 | topo_sort.append(u) # Add it to the topological order 42 | 43 | # Decrement the in-degree of all adjacent vertices (v) of u 44 | for v in graph[u]: 45 | in_degree[v] -= 1 46 | # If in-degree of v becomes zero, add it to the queue 47 | if in_degree[v] == 0: 48 | queue.append(v) 49 | 50 | # Step 4: Check if the topological sort contains all vertices 51 | # If not, the graph has a cycle, and no valid topological order exists. 52 | if len(topo_sort) == len(graph): 53 | return topo_sort 54 | else: 55 | return None 56 | 57 | 58 | # Method 2: DFS-based Topological Sort 59 | def topological_sort_dfs(graph: Dict) -> None | List: 60 | """ 61 | Perform topological sort on a directed acyclic graph (DAG) using DFS. 62 | Time Complexity: T(n) = O(|V| + |E|) 63 | 64 | Args: 65 | graph (dict): The graph represented as an adjacency list. 66 | Example: {'A': ['B', 'C'], 'B': ['C'], 'C': []} 67 | 68 | Returns: 69 | list: A list of vertices in topological order, or None if the graph has a cycle. 
70 | """ 71 | 72 | # Helper function for DFS 73 | def DFS(u): 74 | nonlocal has_cycle 75 | if u in visited: 76 | return 77 | if u in recursion_stack: 78 | has_cycle = True 79 | return 80 | recursion_stack.add(u) # Add to recursion stack 81 | for v in graph[u]: 82 | DFS(v) 83 | recursion_stack.remove(u) 84 | visited.add(u) # Mark as visited 85 | topo_sort.append(u) # Add to topological order 86 | 87 | 88 | visited = set() 89 | recursion_stack = set() # Set to detect cycles 90 | topo_sort = [] 91 | has_cycle = False 92 | 93 | # Perform DFS for all unvisited nodes 94 | for u in graph: 95 | if u not in visited: 96 | DFS(u) 97 | 98 | # If a cycle is detected, return None 99 | if has_cycle: 100 | return None 101 | else: 102 | return topo_sort[::-1] # Reverse the list to get the correct order 103 | 104 | 105 | def schedule_courses(courses, prerequisites, method='kahn'): 106 | """ 107 | Schedule courses based on their prerequisites using topological sort. 108 | 109 | Args: 110 | courses (list): A list of course names. 111 | prerequisites (list): A list of prerequisite pairs, where each pair [a, b] indicates that course 'a' must be taken before course 'b'. 112 | method (str): The method to use for topological sort. Options: 'kahn' (default) or 'dfs'. 113 | 114 | Returns: 115 | list: A list of courses in a valid order, or None if no valid schedule exists. 116 | """ 117 | 118 | # Step 1: Build the graph as an adjacency list 119 | graph = {course: [] for course in courses} # Initialize graph with empty adjacency lists 120 | for a, b in prerequisites: 121 | graph[a].append(b) # Add edge (a -> b) to represent the prerequisite relationship 122 | 123 | # Step 2: Perform topological sort on the graph using the specified method 124 | if method == 'kahn': 125 | return topological_sort_kahn(graph) 126 | elif method == 'dfs': 127 | return topological_sort_dfs(graph) 128 | else: 129 | raise ValueError("Invalid method. 
Choose 'kahn' or 'dfs'.") 130 | 131 | 132 | 133 | courses = ['math_1', 'math_2', 'phy_1', 'phy_2', 'computer_intro', 'AP', 134 | 'discrete_math', 'DSA', 'DLD', 'Stats&probs', 'signals'] 135 | 136 | prerequisites = [ 137 | ['math_1', 'math_2'], # math_1 must be taken before math_2 138 | ['phy_1', 'phy_2'], # phy_1 must be taken before phy_2 139 | ['math_1', 'discrete_math'], 140 | ['computer_intro', 'AP'], 141 | ['AP', 'DSA'], 142 | ['discrete_math', 'DSA'], 143 | ['math_1', 'Stats&probs'], 144 | ['Stats&probs', 'signals'] 145 | ] 146 | 147 | # Get the course schedule using Kahn's algorithm 148 | schedule_kahn = schedule_courses(courses, prerequisites, method='kahn') 149 | if schedule_kahn: 150 | print(f'Course schedule (Kahn\'s Algorithm): {schedule_kahn}') 151 | else: 152 | print("No valid schedule exists due to cyclic prerequisites.") 153 | 154 | # Get the course schedule using DFS-based topological sort 155 | schedule_dfs = schedule_courses(courses, prerequisites, method='dfs') 156 | if schedule_dfs: 157 | print(f'Course schedule (DFS-based): {schedule_dfs}') 158 | else: 159 | print("No valid schedule exists due to cyclic prerequisites.") -------------------------------------------------------------------------------- /Graph_algorithms/Elementary-Algorithms/intro.md: -------------------------------------------------------------------------------- 1 | Here’s the updated and corrected markdown with improvements to grammar, formatting, and clarity: 2 | 3 | 4 | # **Graph Introduction** 5 | 6 | In computer programs, graphs can be represented in two ways: 7 | 1. **Adjacency List** 8 | 2. **Adjacency Matrix** 9 | 10 | ## **Adjacency Matrix** 11 | An adjacency matrix is a 2D array where the rows and columns represent the vertices of the graph. The value at `matrix[i][j]` indicates whether there is an edge between vertex `i` and vertex `j`. 12 | 13 | ### **Unweighted Graph Representation** 14 | - `matrix[i][j] = 1` if there is an edge between vertex `i` and vertex `j`. 
15 | - `matrix[i][j] = 0` if there is no edge. 16 | 17 | ### **Weighted Graph Representation** 18 | - `matrix[i][j]` stores the weight of the edge between vertex `i` and vertex `j`. 19 | - `matrix[i][j] = ∞` (infinity) or a special value (e.g., `-1`) if there is no edge. 20 | 21 | #### **Example Code** 22 | ```python 23 | class Graph: 24 | def __init__(self, vertices_num): 25 | self.vertices_num = vertices_num 26 | self.matrix = [[0] * vertices_num for _ in range(vertices_num)] 27 | 28 | def add_edge(self, u1, u2): 29 | self.matrix[u1][u2] = 1 30 | self.matrix[u2][u1] = 1 # For undirected graphs 31 | 32 | def print_graph(self): 33 | for row in self.matrix: 34 | print(row) 35 | ``` 36 | 37 | ### **Applications** 38 | - Suitable for **dense graphs** (where the number of edges is close to the maximum possible). 39 | - Efficient for checking if an edge exists between two vertices (`O(1)` time complexity). 40 | - Used in algorithms like **Floyd-Warshall** (all-pairs shortest paths). 41 | 42 | ### **Pros** 43 | - Simple and easy to implement. 44 | - Fast edge lookup (`O(1)`). 45 | 46 | ### **Cons** 47 | - Requires `O(V^2)` space, which is inefficient for sparse graphs. 48 | - Adding or removing vertices is expensive. 49 | 50 | --- 51 | 52 | ## **Adjacency List** 53 | An adjacency list represents a graph as an array of lists. Each vertex has a list of its adjacent vertices. 54 | 55 | ### **Representation** 56 | - **Unweighted Graph**: 57 | Use a list of lists, where each inner list contains the neighbors of a vertex. 58 | 59 | - **Weighted Graph**: 60 | Use a list of lists of tuples, where each tuple contains a neighbor and the corresponding edge weight. 
61 | 62 | #### **Example Code** 63 | ```python 64 | class Graph: 65 | def __init__(self, num_vertices): 66 | self.num_vertices = num_vertices 67 | self.adj_list = [[] for _ in range(num_vertices)] 68 | 69 | def add_edge(self, v1, v2): 70 | self.adj_list[v1].append(v2) 71 | self.adj_list[v2].append(v1) # For undirected graphs 72 | 73 | def display(self): 74 | for i, neighbors in enumerate(self.adj_list): 75 | print(f"{i}: {neighbors}") 76 | ``` 77 | 78 | ### **Applications** 79 | - Suitable for **sparse graphs** (where the number of edges is much smaller than the maximum possible). 80 | - Used in algorithms like **Dijkstra’s** and **Prim’s** (for weighted graphs). 81 | 82 | ### **Pros** 83 | - Space-efficient for sparse graphs (`O(V + E)`). 84 | - Easy to add or remove vertices and edges. 85 | 86 | ### **Cons** 87 | - Edge lookup can take up to `O(V)` in the worst case. 88 | - Slightly more complex to implement compared to adjacency matrices. 89 | 90 | --- 91 | 92 | ## **Comparison Table** 93 | 94 | | Feature | **Adjacency Matrix** | **Adjacency List** | 95 | |-----------|------------------------------|--------------------------------| 96 | | **Space Complexity** | `O(V^2)` | `O(V + E)` | 97 | | **Edge Lookup** | `O(1)` | `O(V)` in the worst case | 98 | | **Adding a Vertex** | `O(V^2)` (resizing required) | `O(1)` | 99 | | **Adding an Edge** | `O(1)` | `O(1)` | 100 | | **Removing a Vertex** | `O(V^2)` | `O(E)` | 101 | | **Removing an Edge** | `O(1)` | `O(V)` | 102 | | **Best Use Case** | Dense graphs | Sparse graphs | 103 | 104 | 105 | ### Updates and Fixes: 106 | 1. **Grammar**: Corrected minor grammatical errors, e.g., “Graph Introdunction” → “Graph Introduction.” 107 | 2. **Formatting**: Improved markdown structure with bold headers, consistent code formatting, and organized sections. 108 | 3. **Code Fixes**: Corrected errors in the Python code, e.g., replaced `num` with `vertices_num` and fixed the typo `marix` → `matrix`. 109 | 4. 
# Graph_algorithms/Elementary-Algorithms/rep.py
# (See intro.md for background on graph representations.)
#
# Undirected Adjacency Matrix Using Upper Triangular Matrix
# In an undirected graph the adjacency matrix is symmetric: adjacency of
# nodes i and j means matrix[i][j] == matrix[j][i] == 1.  This symmetry lets
# us store only the upper triangular part (diagonal included), reducing the
# storage from O(V^2) cells to V(V+1)/2, where V is the number of vertices.
#
# Useful for:
#   * algorithms that require an adjacency matrix (e.g. Floyd-Warshall)
#   * dense graphs
# Cons: not as straightforward as a full adjacency matrix or adjacency list.

class UpperTriangularMatrix:
    """Adjacency matrix of an undirected, unweighted graph, stored as the
    upper triangle packed into a flat list."""

    def __init__(self, num_vertices):
        self.num_vertices = num_vertices
        # Number of cells in the upper triangle, diagonal included.
        self.size = (num_vertices * (num_vertices + 1)) // 2
        self.matrix = [0] * self.size

    def get_index(self, i, j):
        """Map cell (i, j) to its position in the flat list.

        The pair is normalised so that i <= j; row i starts after the
        i*(i+1)/2 cells that the previous rows do not store.
        """
        if i > j:
            i, j = j, i
        return i * self.num_vertices + j - (i * (i + 1)) // 2

    def add_edge(self, v1, v2):
        """Add the undirected edge (v1, v2); the graph is unweighted."""
        index = self.get_index(v1, v2)
        self.matrix[index] = 1

    def has_edge(self, v1, v2):
        """Return True if the undirected edge (v1, v2) exists."""
        index = self.get_index(v1, v2)
        return self.matrix[index] == 1

    def display(self):
        """Print the full symmetric matrix reconstructed from the triangle."""
        for i in range(self.num_vertices):
            row = []
            for j in range(self.num_vertices):
                if i <= j:
                    row.append(self.matrix[self.get_index(i, j)])
                else:
                    row.append(self.matrix[self.get_index(j, i)])
            print(row)


# example using graph:
## below is a mini social network of friends
# We have a social network with n users; each user can be friends with other
# users.  Friendships are stored in an upper triangular adjacency matrix.

class SocialNetwork:
    """Mini social network whose (symmetric) friendships are stored in an
    upper triangular adjacency matrix."""

    def __init__(self, num_users):
        """Initialize the social network with a given number of users."""
        self.num_users = num_users
        self.size = (num_users * (num_users + 1)) // 2
        self.matrix = [0] * self.size

    def get_index(self, user1, user2):
        """Calculate the pair's index in the flat 1D array (user1 <= user2)."""
        if user1 > user2:
            user1, user2 = user2, user1
        return user1 * self.num_users + user2 - (user1 * (user1 + 1)) // 2

    def add_friendship(self, user1, user2):
        """
        Add a friendship between two users.
        :param user1: First user.
        :param user2: Second user.
        """
        index = self.get_index(user1, user2)
        self.matrix[index] = 1  # friended!

    def are_friends(self, user1, user2):
        """
        Check if two users are friends.
        :return: True if they are friends, False otherwise.
        """
        index = self.get_index(user1, user2)
        return self.matrix[index] == 1

    def display_friendships(self):
        """Display the friendships in the network as a full matrix."""
        for i in range(self.num_users):
            row = []
            for j in range(self.num_users):
                if i <= j:
                    row.append(self.matrix[self.get_index(i, j)])
                else:
                    row.append(self.matrix[self.get_index(j, i)])
            print(row)


# The **transpose** of a directed graph G = (V, E) is the graph G^T = (V, E^T),
# where E^T = {(v, u) : (u, v) in E}.  In other words, G^T is obtained by
# reversing all the edges of G.

def transpose_graph(graph):
    """Return the transpose G^T of a directed graph G.

    Every edge (u, v) of G becomes (v, u) in G^T.  Sets are used so parallel
    edges collapse to a single edge, and self-loops are dropped (the same
    clean-up `convert_multigraph_to_simple_graph` performs below).

    Bug fix: the previous version used subscript syntax (``add[v]``, a
    TypeError at runtime) and inserted *both* directions of each edge, which
    produced a symmetric graph rather than the transpose.

    Args:
        graph (dict): Directed graph as an adjacency list.

    Returns:
        dict: Adjacency list of G^T, with list values.
    """
    graph_T = {v: set() for v in graph}

    for u in graph:
        for v in graph[u]:
            if u != v:  # skip self-loops
                # Reverse the edge: (u, v) in G becomes (v, u) in G^T.
                # setdefault guards against successors that are not keys.
                graph_T.setdefault(v, set()).add(u)

    # Convert sets back to lists to match the adjacency-list convention.
    return {u: list(preds) for u, preds in graph_T.items()}


def find_universal_sink(adj_matrix):  # O(V)
    """Return the universal sink of a directed graph, or None if none exists.

    A universal sink has an incoming edge from every other vertex and no
    outgoing edges (in-degree |V|-1, out-degree 0).  The elimination pass
    keeps one candidate: an edge candidate -> u rules out the candidate but
    not u.  A second O(V) pass verifies the surviving candidate.

    Bug fix: the previous version never checked incoming edges and never
    returned the candidate on success, so it could only ever return None.

    Args:
        adj_matrix (list[list[int]]): Dense adjacency matrix.

    Returns:
        int | None: Index of the universal sink, or None if there is none.
    """
    n = len(adj_matrix)  # n = |V|
    candidate = 0

    # Elimination: an edge candidate -> u rules out the candidate, not u.
    for u in range(n):
        if adj_matrix[candidate][u] == 1:
            candidate = u

    # Verification: no outgoing edges, and an incoming edge from all others.
    for u in range(n):
        if adj_matrix[candidate][u] == 1:
            return None  # outgoing edge: not a sink
        if u != candidate and adj_matrix[u][candidate] == 0:
            return None  # some vertex has no edge into it: not universal
    return candidate


def convert_multigraph_to_simple_graph(graph):
    """Convert a multigraph into an equivalent undirected simple graph.

    Parallel edges between two vertices are replaced by a single edge, and
    self-loops are removed.  (The old docstring's claim that this computes
    the complement graph G' was incorrect: no complement is computed here.)

    Args:
        graph (dict): The multigraph represented as an adjacency list.

    Returns:
        dict: Adjacency list of the equivalent undirected simple graph.
    """
    simple_graph = {v: set() for v in graph}

    for u in graph:
        for v in graph[u]:
            if u != v:  # skip self-loops
                # Undirected graph: record the edge in both directions.
                simple_graph[u].add(v)
                simple_graph.setdefault(v, set()).add(u)

    return {u: list(neighbors) for u, neighbors in simple_graph.items()}


# # for transpose graph:
# graph = {
#     1: [2, 2, 3],  # Multiple edges between 1 and 2
#     2: [1, 1, 3],  # Multiple edges between 2 and 1, and 2 and 3
#     3: [1, 2, 3],  # Self-loop at 3
#     4: []          # Isolated vertex
# }
# print(transpose_graph(graph))

# # for using UpperTriangularMatrix
# g = UpperTriangularMatrix(4)
# g.add_edge(0, 1)
# g.add_edge(0, 2)
# g.add_edge(1, 2)
# g.add_edge(2, 3)
# g.display()

# # for UpperTriangularMatrix application
# network = SocialNetwork(4)
# network.add_friendship(0, 1)  # User 0 is friends with User 1
# network.add_friendship(0, 2)  # User 0 is friends with User 2
# network.add_friendship(1, 2)  # User 1 is friends with User 2
# network.add_friendship(2, 3)  # User 2 is friends with User 3
# print("Friendships Matrix:")
# network.display_friendships()
# print("\nAre User 0 and User 1 friends?", network.are_friends(0, 1))
# print("Are User 1 and User 3 friends?", network.are_friends(1, 3))
# Graph_algorithms/MinimumSpanningTree/kruskal.py
class DisjointSet:
    """Union-Find structure with path compression and union by rank.

    Kruskal's algorithm uses it to detect cycles: an edge whose two
    endpoints already share a root would close a cycle.

    Attributes:
        parent (dict): Maps each vertex to its parent in the forest.
        rank (dict): Upper bound on each root's tree height (union by rank).
    """

    def __init__(self, vertices):
        """Make every vertex a singleton set (its own root, rank 0).

        Args:
            vertices (list): A list of vertices in the graph.
        """
        self.parent = {vertex: vertex for vertex in vertices}
        self.rank = dict.fromkeys(vertices, 0)

    def find(self, item):
        """Return the root of the set containing *item*, compressing the
        path walked along the way.

        Args:
            item: The vertex to find the root for.

        Returns:
            The root of the set containing *item*.
        """
        root = self.parent[item]
        if root != item:
            root = self.find(root)
            self.parent[item] = root  # path compression
        return root

    def union(self, set1, set2):
        """Merge the sets containing *set1* and *set2* using union by rank.

        Args:
            set1: The first vertex.
            set2: The second vertex.
        """
        root1, root2 = self.find(set1), self.find(set2)
        if root1 == root2:
            return
        # Attach the shorter tree under the taller one.
        if self.rank[root1] > self.rank[root2]:
            self.parent[root2] = root1
        else:
            self.parent[root1] = root2
            if self.rank[root1] == self.rank[root2]:
                self.rank[root2] += 1


def kruskal(graph):
    """Build a Minimum Spanning Tree with Kruskal's greedy algorithm.

    Scan all edges from lightest to heaviest, keeping each edge whose
    endpoints still lie in different components (checked with a DisjointSet).
    Kept edges never close a cycle, and after V-1 of them the MST is complete.

    Steps:
      1. Sort all edges in non-decreasing order of weight.
      2. Initialize a DisjointSet over the vertices.
      3. For each edge in order, keep it iff its endpoints are in different
         sets, then union the sets.

    Time Complexity: O(E log E), i.e. O(E log V).

    Args:
        graph (dict): Undirected weighted graph as an adjacency list,
            e.g. {'A': [('B', 2), ('D', 6)], 'B': [('A', 2), ('C', 3)], ...}.

    Returns:
        list: MST edges as (u, v, weight) tuples.
    """
    # Flatten the adjacency list into (weight, u, v) triples, sorted by weight.
    edges = sorted(
        (weight, u, v)
        for u in graph
        for v, weight in graph[u]
    )

    components = DisjointSet(set(graph.keys()))
    mst = []

    for weight, u, v in edges:
        # Different roots -> the edge joins two components and is safe to keep.
        if components.find(u) != components.find(v):
            mst.append((u, v, weight))
            components.union(u, v)

    return mst


if __name__ == "__main__":
    graph = {
        'A': [('B', 2), ('D', 6)],
        'B': [('A', 2), ('C', 3), ('D', 8)],
        'C': [('B', 3), ('D', 5)],
        'D': [('A', 6), ('B', 8), ('C', 5)],
    }

    mst = kruskal(graph)
    print("Minimum Spanning Tree (Kruskal's Algorithm):")
    for u, v, weight in mst:
        print(f"{u} -- {v} : {weight}")
graph. 5 | 6 | Prim's algorithm is a greedy algorithm that builds the MST by starting from an arbitrary vertex 7 | and repeatedly adding the smallest edge that connects a vertex in the MST to a vertex outside the MST. 8 | 9 | Steps: 10 | 1. Initialize a priority queue (min-heap) to store edges and a set to track vertices included in the MST. 11 | 2. Start with an arbitrary vertex and add all its edges to the priority queue. 12 | 3. While the priority queue is not empty: 13 | - Extract the edge with the smallest weight. 14 | - If the edge connects a vertex not in the MST, add it to the MST and add all edges of the new vertex to the priority queue. 15 | 4. Repeat until all vertices are included in the MST. 16 | 17 | Time Complexity: O(E log V) using a binary heap, where E is the number of edges and V is the number of vertices. 18 | 19 | Args: 20 | graph (dict): The graph represented as an adjacency list. 21 | Example: {'A': [('B', 2), ('D', 6)], 'B': [('A', 2), ('C', 3)], ...} 22 | start: The starting vertex for the algorithm. 23 | 24 | Returns: 25 | list: A list of edges in the MST, where each edge is represented as a tuple (u, v, weight). 
26 | """ 27 | mst = [] 28 | visited = set() 29 | edges = [(weight, start, v) for v, weight in graph[start]] 30 | heapq.heapify(edges) 31 | visited.add(start) 32 | 33 | while edges: 34 | weight, u, v = heapq.heappop(edges) 35 | if v not in visited: 36 | visited.add(v) 37 | mst.append((u, v, weight)) 38 | for neighbor, weight in graph[v]: 39 | if neighbor not in visited: 40 | heapq.heappush(edges, (weight, v, neighbor)) 41 | 42 | return mst 43 | 44 | 45 | 46 | if __name__ == "__main__": 47 | graph = { 48 | 'A': [('B', 2), ('D', 6)], 49 | 'B': [('A', 2), ('C', 3), ('D', 8)], 50 | 'C': [('B', 3), ('D', 5)], 51 | 'D': [('A', 6), ('B', 8), ('C', 5)], 52 | } 53 | 54 | mst = prim(graph, 'A') 55 | print("Minimum Spanning Tree (Prim's Algorithm):") 56 | for u, v, weight in mst: 57 | print(f"{u} -- {v} : {weight}") -------------------------------------------------------------------------------- /Graph_algorithms/MinimumSpanningTree/prim_fibonacci.py: -------------------------------------------------------------------------------- 1 | import math 2 | 3 | # I have used AI for fibonacci heaps since they are not used often in industry 4 | # please double check for the fibonacci heap 5 | class FibonacciHeapNode: 6 | def __init__(self, key, value): 7 | """ 8 | Represents a node in the Fibonacci Heap. 9 | 10 | Attributes: 11 | key (int): The key (priority) of the node. 12 | value (any): The value stored in the node. 13 | degree (int): The number of children of the node. 14 | parent (FibonacciHeapNode): Pointer to the parent node. 15 | child (FibonacciHeapNode): Pointer to one of the child nodes. 16 | left (FibonacciHeapNode): Pointer to the left sibling. 17 | right (FibonacciHeapNode): Pointer to the right sibling. 18 | marked (bool): Indicates whether the node has lost a child since it became a child of another node. 
19 | """ 20 | self.key = key 21 | self.value = value 22 | self.degree = 0 23 | self.parent = None 24 | self.child = None 25 | self.left = self 26 | self.right = self 27 | self.marked = False 28 | 29 | 30 | class FibonacciHeap: 31 | def __init__(self): 32 | """ 33 | Initializes an empty Fibonacci Heap. 34 | 35 | Attributes: 36 | min_node (FibonacciHeapNode): Pointer to the node with the minimum key. 37 | num_nodes (int): The total number of nodes in the heap. 38 | """ 39 | self.min_node = None 40 | self.num_nodes = 0 41 | 42 | def insert(self, key, value): 43 | """ 44 | Inserts a new node into the Fibonacci Heap. 45 | 46 | Args: 47 | key (int): The key (priority) of the node. 48 | value (any): The value to be stored in the node. 49 | """ 50 | node = FibonacciHeapNode(key, value) 51 | if self.min_node is None: 52 | self.min_node = node 53 | else: 54 | self._add_to_root_list(node) 55 | if node.key < self.min_node.key: 56 | self.min_node = node 57 | self.num_nodes += 1 58 | 59 | def _add_to_root_list(self, node): 60 | """ 61 | Adds a node to the root list of the Fibonacci Heap. 62 | 63 | Args: 64 | node (FibonacciHeapNode): The node to be added to the root list. 65 | """ 66 | node.left = self.min_node 67 | node.right = self.min_node.right 68 | self.min_node.right.left = node 69 | self.min_node.right = node 70 | 71 | def extract_min(self): 72 | """ 73 | Extracts and returns the node with the minimum key from the Fibonacci Heap. 74 | 75 | Returns: 76 | FibonacciHeapNode: The node with the minimum key. 
77 | """ 78 | z = self.min_node 79 | if z is not None: 80 | if z.child is not None: 81 | children = self._get_children(z) 82 | for child in children: 83 | self._add_to_root_list(child) 84 | child.parent = None 85 | self._remove_from_root_list(z) 86 | if z == z.right: 87 | self.min_node = None 88 | else: 89 | self.min_node = z.right 90 | self._consolidate() 91 | self.num_nodes -= 1 92 | return z 93 | 94 | def _get_children(self, node): 95 | """ 96 | Retrieves all children of a given node. 97 | 98 | Args: 99 | node (FibonacciHeapNode): The node whose children are to be retrieved. 100 | 101 | Returns: 102 | list: A list of children nodes. 103 | """ 104 | children = [] 105 | current = node.child 106 | while True: 107 | children.append(current) 108 | if current.right == node.child: 109 | break 110 | current = current.right 111 | return children 112 | 113 | def _remove_from_root_list(self, node): 114 | """ 115 | Removes a node from the root list. 116 | 117 | Args: 118 | node (FibonacciHeapNode): The node to be removed from the root list. 119 | """ 120 | node.left.right = node.right 121 | node.right.left = node.left 122 | 123 | def _consolidate(self): 124 | """ 125 | Consolidates the Fibonacci Heap by combining trees of the same degree. 126 | """ 127 | degree_table = [None] * self.num_nodes 128 | nodes = self._get_root_nodes() 129 | for node in nodes: 130 | degree = node.degree 131 | while degree_table[degree] is not None: 132 | other = degree_table[degree] 133 | if node.key > other.key: 134 | node, other = other, node 135 | self._link(other, node) 136 | degree_table[degree] = None 137 | degree += 1 138 | degree_table[degree] = node 139 | self.min_node = None 140 | for node in degree_table: 141 | if node is not None: 142 | if self.min_node is None: 143 | self.min_node = node 144 | elif node.key < self.min_node.key: 145 | self.min_node = node 146 | 147 | def _get_root_nodes(self): 148 | """ 149 | Retrieves all nodes in the root list. 
150 | 151 | Returns: 152 | list: A list of nodes in the root list. 153 | """ 154 | nodes = [] 155 | current = self.min_node 156 | while True: 157 | nodes.append(current) 158 | if current.right == self.min_node: 159 | break 160 | current = current.right 161 | return nodes 162 | 163 | def _link(self, child, parent): 164 | """ 165 | Links two trees by making one node the child of another. 166 | 167 | Args: 168 | child (FibonacciHeapNode): The node to become the child. 169 | parent (FibonacciHeapNode): The node to become the parent. 170 | """ 171 | self._remove_from_root_list(child) 172 | child.parent = parent 173 | if parent.child is None: 174 | parent.child = child 175 | child.left = child 176 | child.right = child 177 | else: 178 | child.left = parent.child 179 | child.right = parent.child.right 180 | parent.child.right.left = child 181 | parent.child.right = child 182 | parent.degree += 1 183 | child.marked = False 184 | 185 | def decrease_key(self, node, new_key): 186 | """ 187 | Decreases the key of a node in the Fibonacci Heap. 188 | 189 | Args: 190 | node (FibonacciHeapNode): The node whose key is to be decreased. 191 | new_key (int): The new key value. 192 | 193 | Raises: 194 | ValueError: If the new key is greater than the current key. 195 | """ 196 | if new_key > node.key: 197 | raise ValueError("New key is greater than current key") 198 | node.key = new_key 199 | parent = node.parent 200 | if parent is not None and node.key < parent.key: 201 | self._cut(node, parent) 202 | self._cascading_cut(parent) 203 | if node.key < self.min_node.key: 204 | self.min_node = node 205 | 206 | def _cut(self, node, parent): 207 | """ 208 | Cuts a node from its parent and adds it to the root list. 209 | 210 | Args: 211 | node (FibonacciHeapNode): The node to be cut. 212 | parent (FibonacciHeapNode): The parent node. 
213 | """ 214 | self._remove_from_child_list(parent, node) 215 | parent.degree -= 1 216 | self._add_to_root_list(node) 217 | node.parent = None 218 | node.marked = False 219 | 220 | def _remove_from_child_list(self, parent, node): 221 | """ 222 | Removes a node from its parent's child list. 223 | 224 | Args: 225 | parent (FibonacciHeapNode): The parent node. 226 | node (FibonacciHeapNode): The node to be removed. 227 | """ 228 | if parent.child == parent.child.right: 229 | parent.child = None 230 | elif parent.child == node: 231 | parent.child = node.right 232 | node.left.right = node.right 233 | node.right.left = node.left 234 | 235 | def _cascading_cut(self, node): 236 | """ 237 | Performs a cascading cut operation to maintain the heap properties. 238 | 239 | Args: 240 | node (FibonacciHeapNode): The node to start the cascading cut from. 241 | """ 242 | parent = node.parent 243 | if parent is not None: 244 | if not node.marked: 245 | node.marked = True 246 | else: 247 | self._cut(node, parent) 248 | self._cascading_cut(parent) 249 | 250 | 251 | def prim_fibonacci_heap(graph, start_vertex): 252 | """ 253 | Implements Prim's algorithm to find the Minimum Spanning Tree (MST) of a graph using a Fibonacci Heap. 254 | 255 | Args: 256 | graph (dict): The graph represented as a dictionary of adjacency lists. 257 | Example: {'A': {'B': 2, 'C': 4}, 'B': {'A': 2, 'C': 1}, 'C': {'A': 4, 'B': 1}} 258 | start_vertex (any): The starting vertex for the algorithm. 259 | 260 | Returns: 261 | tuple: A tuple containing: 262 | - mst (dict): A dictionary representing the MST, where keys are vertices and values are their minimum edge weights. 263 | - total_weight (int): The total weight of the MST. 
264 | """ 265 | heap = FibonacciHeap() 266 | vertex_to_node = {} 267 | mst = {} 268 | total_weight = 0 269 | 270 | # Initialize the heap with all vertices 271 | for vertex in graph: 272 | if vertex == start_vertex: 273 | heap.insert(0, vertex) 274 | else: 275 | heap.insert(math.inf, vertex) 276 | vertex_to_node[vertex] = heap.min_node 277 | 278 | # Build the MST 279 | while heap.num_nodes > 0: 280 | min_node = heap.extract_min() 281 | current_vertex = min_node.value 282 | mst[current_vertex] = min_node.key 283 | total_weight += min_node.key 284 | 285 | # Update keys of adjacent vertices 286 | for neighbor, weight in graph[current_vertex].items(): 287 | if neighbor not in mst: 288 | neighbor_node = vertex_to_node[neighbor] 289 | if weight < neighbor_node.key: 290 | heap.decrease_key(neighbor_node, weight) 291 | 292 | return mst, total_weight 293 | 294 | 295 | graph = { 296 | 'A': {'B': 2, 'D': 6}, 297 | 'B': {'A': 2, 'C': 3, 'D': 8, 'E': 5}, 298 | 'C': {'B': 3, 'E': 7}, 299 | 'D': {'A': 6, 'B': 8, 'E': 9}, 300 | 'E': {'B': 5, 'C': 7, 'D': 9} 301 | } 302 | 303 | mst, total_weight = prim_fibonacci_heap(graph, 'A') 304 | print("Minimum Spanning Tree:", mst) 305 | print("Total Weight:", total_weight) -------------------------------------------------------------------------------- /Graph_algorithms/Single-Source-Shortest-Path/BellmanFord.py: -------------------------------------------------------------------------------- 1 | # lets create a custom and naive graph for 2 | # bellman-ford algorithm 3 | 4 | """ 5 | useful sources: 6 | https://youtu.be/FtN3BYH2Zes?si=FSRzUDhS_zH1eFGU : Abdul bari 7 | CLRS book: chapter 22 - section 1 - The Bellman-Ford algorithm 8 | """ 9 | 10 | 11 | class Graph: 12 | def __init__(self , vertices): 13 | self.V = vertices 14 | self.graph = [] # for edges 15 | # for adding weighted edges 16 | def add_edge(self , u , v, w): 17 | self.graph.append([u , v, w]) 18 | 19 | def Bellman_Ford(self , start): 20 | """ 21 | Implements the Bellman-Ford 
algorithm to find the shortest paths from a source vertex to all other vertices in a weighted graph. 22 | 23 | The Bellman-Ford algorithm works as follows: 24 | 1. Initialize distances from the source vertex to all other vertices as infinity, except the source itself, which is set to 0. 25 | 2. Relax all edges |V| - 1 times, where |V| is the number of vertices. Relaxation is the process of updating the distance to a vertex if a shorter path is found. 26 | - if d[u] + C[(u , v)] < d[v] then d[v] = d[u] + C[(u , v)] 27 | - make sure all edges are checked 28 | 3. After relaxing all edges, check for negative-weight cycles. If a shorter path is still found, it means there is a negative-weight cycle in the graph. 29 | 30 | The algorithm can handle graphs with negative edge weights and detects negative-weight cycles. 31 | 32 | Time Complexity: O(V * E), where V is the number of vertices and E is the number of edges. 33 | Space Complexity: O(V), where V is the number of vertices (for storing distances). 34 | 35 | Args: 36 | src (int): The source vertex from which to compute shortest paths. 37 | 38 | Returns: 39 | None: Prints the shortest distances from the source vertex to all other vertices. 40 | If a negative-weight cycle is detected, it prints a message and returns. 
41 | """ 42 | distances = [float('inf')] * self.V 43 | distances[start] = 0 44 | 45 | for _ in range(self.V - 1): 46 | for u , v, w in self.graph: 47 | if distances[u] != float('inf') and distances[u] + w < distances[v]: 48 | distances[v] = distances[u] + w 49 | 50 | for u , v, w in self.graph: 51 | if distances[u] != float('inf') and distances[u] + w < distances[v]: 52 | print("negetive weight cycle detected") 53 | return 54 | self.print_solution(distances) 55 | 56 | def print_solution(self, dist): 57 | print("Vertex Distance from Source") 58 | for i in range(self.V): 59 | print(f"{i}\t\t{dist[i]}") 60 | if __name__ == '__main__': 61 | graph = Graph(5) 62 | graph.add_edge(0, 1, -1) 63 | graph.add_edge(0, 2, 4) 64 | graph.add_edge(1, 2, 3) 65 | graph.add_edge(1, 3, 2) 66 | graph.add_edge(1, 4, 2) 67 | graph.add_edge(3, 2, 5) 68 | graph.add_edge(3, 1, 1) 69 | graph.add_edge(4, 3, -3) 70 | print(graph.graph) 71 | graph.Bellman_Ford(0) 72 | -------------------------------------------------------------------------------- /Graph_algorithms/Single-Source-Shortest-Path/Dijkstra.py: -------------------------------------------------------------------------------- 1 | import heapq 2 | 3 | class Graph: 4 | def __init__(self, vertices): 5 | self.V = vertices # Number of vertices 6 | self.graph = [[] for _ in range(vertices)] # Adjacency list 7 | 8 | def add_edge(self, u, v, w): 9 | """ 10 | Adds a directed edge from vertex u to vertex v with weight w. 11 | """ 12 | self.graph[u].append((v, w)) 13 | 14 | def Dijkstra(self, src): 15 | """ 16 | Implements Dijkstra's algorithm to find the shortest paths from a source vertex to all other vertices in a weighted graph with non-negative edge weights. 17 | 18 | The algorithm works as follows: 19 | 1. Initialize distances from the source vertex to all other vertices as infinity, except the source itself (distance 0). 20 | 2. Use a priority queue (min-heap) to track the next vertex to explore based on the smallest tentative distance. 
21 | 3. Extract the vertex with the smallest distance from the heap, relax its outgoing edges, and update distances if a shorter path is found. 22 | 4. Repeat until all reachable vertices are processed. 23 | Dijkstra's algorithm cannot handle graphs with negative edge weights. 24 | Time Complexity: O((V + E) log V), where V is the number of vertices and E is the number of edges. 25 | Space Complexity: O(V + E) for the adjacency list and O(V) for the distance array and priority queue. 26 | Args: 27 | src (int): The source vertex from which to compute shortest paths. 28 | Returns: 29 | list: An array where each index represents a vertex, and the value is the shortest distance from the source to that vertex. 30 | """ 31 | # distances initialization 32 | dist = [float("inf")] * self.V 33 | dist[src] = 0 34 | # heap: (distance, vertex) 35 | heap = [] 36 | heapq.heappush(heap, (0, src)) 37 | while heap: 38 | current_dist, u = heapq.heappop(heap) 39 | # Skip if a shorter path to vertex u has already been found 40 | if current_dist > dist[u]: 41 | continue 42 | # Step 3: Relax all outgoing edges from u 43 | for v, w in self.graph[u]: 44 | if dist[v] > dist[u] + w: 45 | dist[v] = dist[u] + w 46 | heapq.heappush(heap, (dist[v], v)) 47 | return dist 48 | def print_solution(self, dist): 49 | print("Vertex \t Distance from Source") 50 | for i in range(self.V): 51 | print(f"{i}\t\t{dist[i]}") 52 | 53 | 54 | if __name__ == "__main__": 55 | graph = Graph(5) 56 | graph.add_edge(0, 1, 4) 57 | graph.add_edge(0, 2, 1) 58 | graph.add_edge(1, 3, 1) 59 | graph.add_edge(2, 1, 2) 60 | graph.add_edge(2, 3, 5) 61 | graph.add_edge(3, 4, 3) 62 | 63 | print(graph.graph) 64 | distances = graph.Dijkstra(0) 65 | graph.print_solution(distances) -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 mahan zavari 4 | 5 | Permission is hereby 
granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /Medians-and-Order-Statistics/Selection_in_expected_linear_time.py: -------------------------------------------------------------------------------- 1 | import random 2 | # the time complexity in worst case for this algorithm is O(n^2) 3 | def partition(A, low, high): 4 | # Check if all elements in the subarray are equal 5 | all_equal = True 6 | first_element = A[low] 7 | for i in range(low + 1, high + 1): 8 | if A[i] != first_element: 9 | all_equal = False 10 | break 11 | if all_equal: 12 | return (low + high) // 2 13 | 14 | # Normal partitioning 15 | pivot = A[high] 16 | i = low - 1 17 | 18 | for j in range(low, high): 19 | if A[j] <= pivot: # Change to >= for monotonically decreasing order 20 | i += 1 21 | swap(A, i, j) 22 | 23 | swap(A, high, i + 1) 24 | return i + 1 25 | 26 | def swap(A , i , j): 27 | A[i] , A[j] = A[j] , A[i] 28 | def randomized_partition(A, low, high): 29 | 30 | i = random.randint(low, high) # Randomly select a pivot index 31 | swap(A, i, high) 32 | return partition(A, low, high) 33 | def randomized_Select(A : list, low : int, high : int , i : int) -> int | float: 34 | """finds the ith minimum number in an array A 35 | in Order of Θ(n) 36 | 37 | Args: 38 | A (Array/List): The intended array 39 | low (int): 40 | high (int): 41 | i (int): ith static order 42 | 43 | Returns: 44 | the ith minimum number 45 | """ 46 | if low == high: 47 | return A[low] 48 | q = randomized_partition(A , low , high) 49 | k = q - low + 1 # number of elements 50 | if i == k: 51 | return A[q] # the pivot value is the answer 52 | elif i < k: 53 | return randomized_Select(A , low , q - 1 , i) 54 | else: return randomized_Select(A , q + 1 , high , i - k) 55 | 56 | 57 | 58 | b = [] 59 | b = [1 , 5, 46, 7 , 4 , 6 , 12 , 0 , 4 , 2] 60 | b = randomized_Select(b , 0 , (len(b) - 1) , 6 ) 61 | print(b) -------------------------------------------------------------------------------- 
/Medians-and-Order-Statistics/Selection_in_worst_case_linear_time.py: -------------------------------------------------------------------------------- 1 | def SELECT(A, p, r, i): 2 | """The selection algorithm presented in this section achieves 3 | linear time in the worst case, but it is not nearly as practical as 4 | RANDOMIZED-SELECT. It is mostly of theoretical interest. 5 | """ 6 | 7 | # Step 1: Ensure (r - p + 1) is divisible by 5 8 | while (r - p + 1) % 5 != 0: 9 | for j in range(p + 1, r + 1): 10 | if A[p] > A[j]: 11 | A[p], A[j] = A[j], A[p] # Swap A[p] with A[j] 12 | if i == 1: 13 | return A[p] # Return the minimum of A[p:r+1] 14 | p += 1 15 | i -= 1 16 | 17 | # Step 2: Group elements into groups of 5 and sort each group 18 | g = (r - p + 1) // 5 # Number of 5-element groups 19 | for j in range(g): 20 | start = p + j * 5 21 | group = A[start:start + 5] 22 | group.sort() 23 | A[start:start + 5] = group 24 | 25 | # Step 3: Find the median of medians 26 | medians_start = p + 2 27 | medians_end = p + 2 + g - 1 28 | median_of_medians_index = (g + 1) // 2 29 | x = SELECT(A, medians_start, medians_end, median_of_medians_index) 30 | 31 | # Step 4: Partition around the pivot (x) 32 | q = PARTITION_AROUND(A, p, r, x) 33 | 34 | # Step 5: Recursive selection based on partition result 35 | k = q - p + 1 36 | if i == k: 37 | return A[q] # The pivot value is the answer 38 | elif i < k: 39 | return SELECT(A, p, q - 1, i) 40 | else: 41 | return SELECT(A, q + 1, r, i - k) 42 | 43 | def PARTITION_AROUND(A, p, r, pivot): 44 | """Partitions the array around the given pivot value.""" 45 | pivot_index = A.index(pivot) 46 | A[pivot_index], A[r] = A[r], A[pivot_index] # Move pivot to the end 47 | i = p - 1 48 | for j in range(p, r): 49 | if A[j] <= pivot: 50 | i += 1 51 | A[i], A[j] = A[j], A[i] 52 | A[i + 1], A[r] = A[r], A[i + 1] # Place pivot in its final position 53 | return i + 1 54 | -------------------------------------------------------------------------------- 
/README.md: -------------------------------------------------------------------------------- 1 | # Data Structures and Algorithms Implementation 2 | 3 | This repository contains implementations of various data structures and algorithms in Python. It is organized into distinct modules for sorting algorithms and different types of trees. 4 | 5 | ## Directory Structure 6 | ``` 7 | mahanzavari-datastructures-algorithms/ 8 | ├── README.md 9 | ├── LICENSE # License information for the project 10 | ├── Augmenting_Data_Structures/ # Directory for augmented data structure implementations 11 | │ ├── RedBlackTree_size.py # Red-Black tree implementation with size augmentation 12 | │ └── IntervalTree/ # Directory for Interval Tree implementation 13 | │ ├── IntervalTree.py # Interval Tree implementation 14 | │ └── Interval_tree_in_practice.py # Practice script for Interval Trees 15 | ├── Medians-and-Order-Statistics/ # Directory for median and order statistics implementations 16 | │ ├── Selection_in_expected_linear_time.py # Implementation of randomized selection 17 | │ └── Selection_in_worst_case_linear_time.py # Implementation of deterministic selection 18 | ├── Sorts_Algorithms/ # Directory for sorting algorithm implementations 19 | │ ├── README.md # README for sorting algorithms 20 | │ ├── Sort_comparisons.py # Python script for comparing sorting algorithms 21 | │ └── bucket_sort.py # Implementation of bucket sort 22 | └── Trees/ # Directory for tree data structure implementations 23 | ├── AVLTree/ # Directory for AVL Tree implementation 24 | │ ├── AVLTree.py # AVL Tree implementation 25 | │ ├── GUI.py # GUI for AVL Tree visualization 26 | │ ├── main.py # Main script to run the AVL Tree GUI 27 | │ └── utils.py # Utility functions for AVL Tree 28 | ├── BTrees/ # Directory for B+ Tree implementation 29 | │ └── BPlussTree.py # Python script for B+ Tree implementation 30 | └── RedBlackTree/ # Directory for Red-Black Tree implementation 31 | ├── README.md 32 | └── RedBlackTree.py 
# Python script for Red-Black Tree implementation 33 | ``` 34 | 35 | ## Project Overview 36 | 37 | This project is designed to demonstrate the implementation and behavior of common data structures and algorithms. It is divided into two main sections: 38 | 39 | 1. **Sorting Algorithms**: This section provides implementations of various sorting algorithms and a script to compare their performance. 40 | 2. **Trees**: This section includes implementations of different types of balanced and un-balanced tree data structures. 41 | 3. **Augmenting Data Structures**: This section includes implementations of data structures augmented with additional information to support new operations. 42 | 4. **Medians and Order Statistics**: This section provides implementations for selecting the kth smallest element in an array. 43 | 44 | Each section has its own `README.md` with detailed information about the implementation, usage, and specific algorithms. 45 | 46 | ## Getting Started 47 | 48 | ### Prerequisites 49 | 50 | - Python 3.8 or higher 51 | - Required libraries are listed within the `README.md` of each individual module (found within `/Sorts_Algorithms`, `/Trees/AVLTree` and `/Trees/RedBlackTree`). 52 | - Graphviz (see `Trees/RedBlackTree/README.md` and `Trees/AVLTree/README.md` for installation) 53 | 54 | ### Cloning the repository 55 | 56 | ```bash 57 | git clone [https://github.com/mahanzavari/DataStructures-Algorithms] 58 | cd [DataStructures-Algorithms] 59 | ``` 60 | ### Running the programs 61 | 62 | Each module can be run separately. Please refer to the module's `README.md` for specific execution instructions: 63 | 64 | * **Sorting Algorithms:** Instructions in `Sorts_Algorithms/README.md`. 65 | * **AVL Tree:** Instructions in `Trees/AVLTree/README.md`. 66 | * **B+ Tree:** Instructions within `Trees/BTrees/BPlussTree.py` 67 | * **Red-Black Tree:** Instructions in `Trees/RedBlackTree/README.md`. 68 | 69 | ## Modules 70 | 71 | ### 1. 
Sorting Algorithms (`Sorts_Algorithms/`) 72 | 73 | This module contains implementations and time comparisons for: 74 | 75 | * Quicksort 76 | * Randomized Quicksort 77 | * Insertion Sort 78 | * Merge Sort 79 | * Bubble Sort 80 | * Selection Sort 81 | * Heapsort 82 | * Bottom-Up Quicksort 83 | 84 | Refer to `Sorts_Algorithms/README.md` for detailed information. 85 | To run the program: 86 | ```bash 87 | cd Sorts_Algorithms 88 | python Sort_comparisons.py 89 | ``` 90 | 91 | ### 2. Trees (`Trees/`) 92 | 93 | This module contains implementations of different types of tree data structures: 94 | 95 | * **AVL Tree (`Trees/AVLTree/`)**: A self-balancing binary search tree with a GUI for visualization. This module contains: 96 | * `AVLTree.py`: The core implementation of the AVL tree. 97 | * `GUI.py`: A graphical user interface for interacting with and visualizing the tree. 98 | * `main.py`: Runs the GUI application. 99 | * `utils.py`: Includes utility function to perform basic operations on the tree. 100 | Refer to `Trees/AVLTree/README.md` for detailed information on how to run the GUI. 101 | To run the program: 102 | ```bash 103 | cd Trees/AVLTree 104 | python main.py 105 | ``` 106 | * **B+ Tree (`Trees/BTrees/`)**: An implementation of a B+ tree data structure used in database indexing. 107 | To run the program: 108 | ```bash 109 | cd Trees/BTrees 110 | python BPlussTree.py --order [B+Tree order] 111 | ``` 112 | * **Red-Black Tree (`Trees/RedBlackTree/`)**: A self-balancing binary search tree with command interface and visualizations using Graphviz. This module contains: 113 | * `RedBlackTree.py`: Implementation of the Red-Black Tree. 114 | Refer to `Trees/RedBlackTree/README.md` for details and instructions. 115 | To run the program: 116 | ```bash 117 | cd Trees/RedBlackTree 118 | python RedBlackTree.py 119 | ``` 120 | 121 | ## Contributing 122 | 123 | Contributions are welcome! 
If you find any bugs or have suggestions for improvements, feel free to create an issue or submit a pull request. 124 | 125 | ## License 126 | 127 | This project is licensed under the MIT License. See `LICENSE` for details. 128 | 129 | -------------------------------------------------------------------------------- /Sorts_Algorithms/README.md: -------------------------------------------------------------------------------- 1 | # Sorting Algorithm Comparisons 2 | 3 | This Python program compares the performance of several common sorting algorithms: 4 | 5 | * **Quicksort** 6 | * **Randomized Quicksort** 7 | * **Insertion Sort** 8 | * **Merge Sort** 9 | * **Bubble Sort** 10 | * **Selection Sort** 11 | * **Heapsort** 12 | * **Bottom-Up Quicksort** 13 | 14 | It allows you to input numbers, and when you type "SORT," it will sort the entered numbers using each of the algorithms, and display the sorted array as well as the running time of each algorithm. 15 | 16 | ## Sorting Algorithm Introductions 17 | 18 | Here is a summary of each sorting algorithm used in this program: 19 | 20 | ### Quicksort 21 | 22 | * **Description:** A divide-and-conquer algorithm that works by partitioning the input array around a pivot element and then recursively sorting the sub-arrays. It's known for its efficiency in practice but has a worst-case time complexity. 23 | * **Memory Order:** Θ(n) due to recursion. 24 | * **Time Complexity:** 25 | * **Worst-case:** O(n^2) 26 | * **Average-case:** O(n log n) 27 | * **Best-case:** O(n log n) 28 | * **In-place:** Yes 29 | * **Stability:** No 30 | 31 | ### Randomized Quicksort 32 | 33 | * **Description:** Similar to quicksort, but it selects a random element as the pivot to prevent worst-case scenarios for some input arrays. 34 | * **Memory Order:** Θ(n) due to recursion. 
35 | * **Time Complexity:** 36 | * **Worst-case:** O(n^2) 37 | * **Average-case:** O(n log n) 38 | * **Best-case:** O(n log n) 39 | * **In-place:** Yes 40 | * **Stability:** No 41 | 42 | ### Insertion Sort 43 | 44 | * **Description:** A simple sorting algorithm that builds the final sorted array one item at a time. It is efficient for small datasets and nearly sorted data. 45 | * **Memory Order:** Θ(1). 46 | * **Time Complexity:** 47 | * **Worst-case:** O(n^2) 48 | * **Average-case:** O(n^2) 49 | * **Best-case:** O(n) 50 | * **In-place:** Yes 51 | * **Stability:** Yes 52 | 53 | ### Merge Sort 54 | 55 | * **Description:** A divide-and-conquer algorithm that divides the array into halves, recursively sorts each half, and then merges the sorted halves. It's stable and has consistent performance. 56 | * **Memory Order:** Θ(n) due to use of temp arrays. 57 | * **Time Complexity:** 58 | * **Worst-case:** O(n log n) 59 | * **Average-case:** O(n log n) 60 | * **Best-case:** O(n log n) 61 | * **In-place:** No (requires additional space for merging) 62 | * **Stability:** Yes 63 | 64 | ### Bubble Sort 65 | 66 | * **Description:** A simple algorithm that repeatedly steps through the array, compares adjacent elements, and swaps them if they are in the wrong order. Inefficient for large datasets. 67 | * **Memory Order:** Θ(1). 68 | * **Time Complexity:** 69 | * **Worst-case:** O(n^2) 70 | * **Average-case:** O(n^2) 71 | * **Best-case:** O(n) 72 | * **In-place:** Yes 73 | * **Stability:** Yes 74 | 75 | ### Selection Sort 76 | 77 | * **Description:** An algorithm that repeatedly finds the minimum element from the unsorted part of the array and places it at the beginning. Not very efficient. 78 | * **Memory Order:** Θ(1). 
79 | * **Time Complexity:** 80 | * **Worst-case:** O(n^2) 81 | * **Average-case:** O(n^2) 82 | * **Best-case:** O(n^2) 83 | * **In-place:** Yes 84 | * **Stability:** No 85 | 86 | ### Heapsort 87 | 88 | * **Description:** A comparison-based sorting algorithm that uses a heap data structure. It is efficient and has a consistent time complexity. 89 | * **Memory Order:** Θ(1) 90 | * **Time Complexity:** 91 | * **Worst-case:** O(n log n) 92 | * **Average-case:** O(n log n) 93 | * **Best-case:** O(n log n) 94 | * **In-place:** Yes 95 | * **Stability:** No 96 | 97 | ### Bottom-Up Quicksort 98 | 99 | * **Description:** An iterative implementation of Quicksort, using a stack to simulate recursion. It avoids the overhead of recursive calls. 100 | * **Memory Order:** Θ(n) due to use of stack. 101 | * **Time Complexity:** 102 | * **Worst-case:** O(n^2) 103 | * **Average-case:** O(n log n) 104 | * **Best-case:** O(n log n) 105 | * **In-place:** Yes 106 | * **Stability:** No 107 | 108 | ## How to Use 109 | 110 | 1. **Clone the repository:** 111 | ```bash 112 | git clone [https://github.com/mahanzavari/DSA] 113 | cd [DSA] 114 | ``` 115 | 2. **Run the script:** 116 | ```bash 117 | python Sort_comparisons.py 118 | ``` 119 | 3. **Input:** 120 | * Enter numbers one by one when prompted. 121 | * Type `SORT` to execute all the sorting algorithms on your numbers and see running times. 122 | * Type `EXIT` to terminate the program. 123 | 124 | ## Code Overview 125 | 126 | * **`Sort_comparisons.py`**: The main Python script containing the implementation of all sorting algorithms, timing and input/output. 127 | * `swap(A, i, j)`: Helper function to swap elements in the array `A`. 128 | * `partition(A, low, high)`: Helper function to partition for quicksort. 129 | * `randomized_partition(A, low, high)`: Helper function to partition with random pivot for randomized quicksort. 130 | * `quicksort(A, p, r)`: Recursive implementation of the quicksort algorithm. 
131 | * `randomized_quicksort(A, p, r)`: Recursive implementation of the randomized quicksort algorithm. 132 | * `insertion_sort(A)`: Implementation of the insertion sort algorithm. 133 | * `merge(A, left, right)`: Helper function to merge for merge sort. 134 | * `merge_sort(A)`: Recursive implementation of the merge sort algorithm. 135 | * `bubble_sort(A)`: Implementation of the bubble sort algorithm. 136 | * `selection_sort(A)`: Implementation of the selection sort algorithm. 137 | * `heapify(A, n, i)`: Helper function to heapify for heapsort. 138 | * `heapsort(A)`: Implementation of the heapsort algorithm. 139 | * `bottom_up_quicksort(A)`: Iterative implementation of quicksort using a stack. 140 | * `measure_running_time(sort_func, A, p=None, r=None, trials=100)`: Helper function to measure the running time of an algorithm 141 | * The `if __name__ == "__main__":` block handles the user input, calls the sorting functions and prints the output. 142 | 143 | ## Notes 144 | 145 | * The time measurements are based on the average of 100 trials for more stability. 146 | * The arrays are copied before being passed to each sorting algorithm to ensure each runs on the same input array. 147 | * The output shows the sorted array and the running time of each sorting algorithm. 148 | 149 | ## Contributing 150 | 151 | Contributions are welcome! If you find any bugs or have suggestions for improvements, feel free to create an issue or submit a pull request. 
152 | -------------------------------------------------------------------------------- /Sorts_Algorithms/Sort_comparisons.py: -------------------------------------------------------------------------------- 1 | import random 2 | import time 3 | 4 | def swap(A, i, j): 5 | A[i], A[j] = A[j], A[i] 6 | 7 | def partition(A, low, high): 8 | all_equal = True 9 | first_element = A[low] 10 | for i in range(low + 1, high + 1): 11 | if A[i] != first_element: 12 | all_equal = False 13 | break 14 | if all_equal: 15 | return (low + high) // 2 16 | pivot = A[high] 17 | i = low - 1 18 | for j in range(low, high): 19 | if A[j] <= pivot: 20 | i += 1 21 | swap(A, i, j) 22 | swap(A, high, i + 1) 23 | return i + 1 24 | 25 | def randomized_partition(A, low, high): 26 | i = random.randint(low, high) 27 | swap(A, i, high) 28 | return partition(A, low, high) 29 | 30 | # QuickSort 31 | 32 | # Memory Order: Θ(n) 33 | # Time complexity: T(n) = T(n – 1) + T(0) + Θ(n) = T (n – 1) + Θ(n) == O(n^2); 34 | # substitution method can be used to prove that the recurrence 35 | # T(n) = 2T (n/2) + Θ(n) = O(nlogn) if balanced (use master theorem for evaluating the recursive expression) 36 | # The average running time of quicksort is O(nlgn) because even if the 37 | # partition function, splits the n elements into a 9n/10 and n/10 sections 38 | # solving the recurrence relation shows that the time complexity would be O(nlgn) 39 | def quicksort(A : list, p : int, r : int ) -> list: 40 | """The quicksort algorithm has a worst-case running time of Θ(n^2) on an 41 | input Aay of n numbers. Despite this slow worst-case running time, 42 | quicksort is often the best practical choice for sorting because it is 43 | remarkably efficient on average: its expected running time is Θ(n lg n) 44 | when all numbers are distinct, and the constant factors hidden in the 45 | Θ(n lg n) notation are small. 
Unlike merge sort, it also has the 46 | advantage of sorting in place , and it works well even in 47 | virtual-memory environments""" 48 | if p < r: 49 | q = partition(A, p, r) 50 | quicksort(A, p, q - 1) 51 | quicksort(A, q + 1, r) 52 | 53 | def randomized_quicksort(A, p, r): 54 | if p < r: 55 | q = randomized_partition(A, p, r) 56 | randomized_quicksort(A, p, q - 1) 57 | randomized_quicksort(A, q + 1, r) 58 | 59 | def insertion_sort(A : list) -> list: 60 | for i in range(1, len(A)): 61 | key = A[i] 62 | j = i - 1 63 | while j >= 0 and A[j] > key: 64 | A[j + 1] = A[j] 65 | j -= 1 66 | A[j + 1] = key 67 | 68 | def merge_sort(A : list) -> list: 69 | if len(A) > 1: 70 | mid = len(A) // 2 71 | left = merge_sort(A[:mid]) 72 | right = merge_sort(A[mid:]) 73 | return merge(left, right) 74 | return A 75 | 76 | def merge(left, right): 77 | result = [] 78 | i = j = 0 79 | while i < len(left) and j < len(right): 80 | if left[i] <= right[j]: 81 | result.append(left[i]) 82 | i += 1 83 | else: 84 | result.append(right[j]) 85 | j += 1 86 | result.extend(left[i:]) 87 | result.extend(right[j:]) 88 | return result 89 | 90 | def bubble_sort(A : list) -> list: 91 | n = len(A) 92 | for i in range(n): 93 | for j in range(0, n - i - 1): 94 | if A[j] > A[j + 1]: 95 | swap(A, j, j + 1) 96 | 97 | def selection_sort(A : list) -> list: 98 | n = len(A) 99 | for i in range(n): 100 | min_idx = i 101 | for j in range(i + 1, n): 102 | if A[j] < A[min_idx]: 103 | min_idx = j 104 | swap(A, i, min_idx) 105 | 106 | def heapify(A, n, i): 107 | largest = i 108 | left = 2 * i + 1 109 | right = 2 * i + 2 110 | if left < n and A[left] > A[largest]: 111 | largest = left 112 | if right < n and A[right] > A[largest]: 113 | largest = right 114 | if largest != i: 115 | swap(A, i, largest) 116 | heapify(A, n, largest) 117 | 118 | def heapsort(A : list) -> list: 119 | n = len(A) 120 | for i in range(n // 2 - 1, -1, -1): 121 | heapify(A, n, i) 122 | for i in range(n - 1, 0, -1): 123 | swap(A, 0, i) 124 | 
heapify(A, i, 0)

def bottom_up_quicksort(A : list) -> list:
    # Iterative quicksort: an explicit stack of (low, high) subarray bounds
    # replaces the recursion of the plain quicksort above.
    stack = [(0, len(A) - 1)]
    while stack:
        low, high = stack.pop()
        if low < high:
            q = partition(A, low, high)
            # Push both partitions; order of processing does not matter.
            stack.append((low, q - 1))
            stack.append((q + 1, high))

# non-comparison sorting - counting sort
# Time-complexity = Θ(n + k) where k is the value range (max - min + 1)
def count_sort(A: list) -> list:
    """
    Sorts an array of integers in ascending order using Counting Sort.

    Args:
        A (List): Input array to be sorted. Must contain integers (positive or negative).

    Returns:
        List: Sorted array in ascending order.

    Raises:
        ValueError: If the input array contains non-integer values.
    """
    # Ensure all elements are integers
    if not all(isinstance(num, int) for num in A):
        raise ValueError("All elements in the input array must be integers.")

    if not A:
        return A

    min_val = min(A)
    max_val = max(A)

    # Handle negative numbers by shifting the range so index 0 maps to min_val
    range_of_values = max_val - min_val + 1
    count_array = [0] * range_of_values

    # Frequency of each element
    for num in A:
        count_array[num - min_val] += 1

    # Cumulative frequency (for ascending order this time)
    for i in range(1, range_of_values):
        count_array[i] += count_array[i - 1]

    # Build the output array in ascending order.
    # NOTE(review): iterating from the start (rather than the conventional
    # back-to-front pass) still yields a sorted result, but places equal keys
    # in reverse input order — i.e. this variant is not stable. Harmless for
    # plain ints, which carry no satellite data.
    output = [0] * len(A)
    for i in range(0, len(A)): # Iterate from the start
        output[count_array[A[i] - min_val] - 1] = A[i]
        count_array[A[i] - min_val] -= 1

    return output

# Radix sort
def counting_sort(A, exp):
    """
    A helper function to perform counting sort on the array based on the digit represented by exp.
184 | """ 185 | n = len(A) 186 | output = [0] * n 187 | count = [0] * 10 # Count array to store 10 numbers 188 | 189 | #frequency 190 | for i in range(n): 191 | index = (A[i] // exp) % 10 192 | count[index] += 1 193 | #update coutn 194 | for i in range(1, 10): 195 | count[i] += count[i - 1] 196 | 197 | i = n - 1 198 | while i >= 0: 199 | index = (A[i] // exp) % 10 200 | output[count[index] - 1] = A[i] 201 | count[index] -= 1 202 | i -= 1 203 | 204 | # Copy the sorted elements back into the original array 205 | for i in range(n): 206 | A[i] = output[i] 207 | 208 | def radix_sort(A: list) -> list: 209 | """ 210 | Sorts an array of non-negative integers using Radix Sort. 211 | 212 | Args: 213 | A (List): Input array to be sorted. Must contain non-negative integers. 214 | 215 | Returns: 216 | List: Sorted array. 217 | 218 | Raises: 219 | ValueError: If the input array contains non-integer or negative values. 220 | """ 221 | # Input validation: Ensure all elements are non-negative integers 222 | if not all(isinstance(num, int) and num >= 0 for num in A): 223 | raise ValueError("All elements in the input array must be non-negative integers.") 224 | 225 | if not A: 226 | return 227 | 228 | # Find the maximum number to determine the number of digits 229 | maximum = max(A) 230 | 231 | # Perform counting sort for each digit, starting from the least significant digit (LSD) 232 | digit = 1 233 | while maximum // digit > 0: 234 | counting_sort(A, digit) 235 | digit *= 10 # Move to the next digit (e.g., units, tens, hundreds, etc.) 236 | 237 | return A 238 | 239 | def cocktail_shaker_sort(A: list) -> list: 240 | """ 241 | Sorts a list using cocktail_shaker Sort (also known as Shaker Sort). 242 | 243 | Args: 244 | A (list): The list to be sorted. 245 | 246 | Returns: 247 | list: The sorted list. 
248 | """ 249 | n = len(A) 250 | swapped = True 251 | start = 0 252 | end = n - 1 253 | 254 | while swapped: 255 | swapped = False 256 | 257 | # Traverse from left to right (like Bubble Sort) 258 | for i in range(start, end): 259 | if A[i] > A[i + 1]: 260 | A[i], A[i + 1] = A[i + 1], A[i] 261 | swapped = True 262 | 263 | if not swapped: 264 | break # If no swap occurred in the forward pass, the array is sorted 265 | 266 | swapped = False 267 | end -= 1 # Reduce the end boundary 268 | 269 | # Traverse from right to left 270 | for i in range(end - 1, start - 1, -1): 271 | if A[i] > A[i + 1]: 272 | A[i], A[i + 1] = A[i + 1], A[i] 273 | swapped = True 274 | 275 | start += 1 276 | 277 | return A 278 | 279 | def pigeonhole_sort(A: list) -> list: 280 | """ 281 | Sorts a list of integers using Pigeonhole Sort. 282 | 283 | Args: 284 | A (list): The list to be sorted (must contain integers). 285 | 286 | Returns: 287 | list: The sorted list. 288 | """ 289 | if not A: 290 | return A 291 | 292 | min_val = min(A) 293 | max_val = max(A) 294 | size = max_val - min_val + 1 295 | 296 | # Create pigeonholes (buckets) 297 | pigeonholes = [0] * size 298 | 299 | for num in A: 300 | pigeonholes[num - min_val] += 1 301 | 302 | # Reconstruct the sorted array 303 | sorted_A = [] 304 | for index, count in enumerate(pigeonholes): 305 | value = index + min_val 306 | sorted_A.extend([value] * count) # Append 'value' 'count' times 307 | 308 | return sorted_A 309 | 310 | 311 | def flash_sort(A: list) -> list: 312 | """ 313 | Sorts a list of numbers using Flash Sort. 314 | 315 | Args: 316 | A (list): The list to be sorted (should contain comparable numbers). 317 | 318 | Returns: 319 | list: The sorted list. 
320 | """ 321 | n = len(A) 322 | if n <= 1: 323 | return A # Base case for empty or single-element list 324 | 325 | min_val = min(A) 326 | max_val = max(A) 327 | 328 | if min_val == max_val: 329 | return A # handle all elements having same value 330 | 331 | # Calculate class sizes (number of buckets) 332 | m = int(0.43 * n) # empirically chosen factor, can vary 333 | if m < 1: 334 | m = 1 # min 1 class 335 | 336 | # Create the "L" array (class distribution) 337 | L = [0] * m 338 | 339 | # Classify the elements 340 | for num in A: 341 | k = int(((num - min_val) / (max_val - min_val)) * (m - 1)) 342 | L[k] += 1 343 | 344 | # Calculate starting positions in the buckets 345 | for k in range(1, m): 346 | L[k] += L[k - 1] 347 | 348 | # Permutation phase 349 | hold = A[0] 350 | move = 0 351 | j = 0 352 | k = m - 1 # last class for start 353 | while move < n - 1: 354 | while j > L[k]-1: 355 | k -= 1 # find the new class 356 | if k < 0: 357 | k = 0 358 | while j <= L[k]-1: # find the new place for hold 359 | j +=1 360 | if move > 0 and j >= n: 361 | break # if not first move and all elements are traversed then break out 362 | 363 | temp = A[j] 364 | A[j] = hold 365 | hold = temp 366 | L[k] -= 1 367 | move += 1 368 | 369 | # Insertion sort phase within each class 370 | for i in range(1,n): 371 | hold = A[i] 372 | j = i-1 373 | while j >= 0 and A[j] > hold: 374 | A[j+1] = A[j] 375 | j -=1 376 | A[j+1] = hold 377 | 378 | return A 379 | 380 | 381 | # The array A is sorted in place 382 | def measure_running_time(sort_func, A, p=None, r=None, trials=100): 383 | total_time = 0 384 | for _ in range(trials): 385 | start_time = time.perf_counter() 386 | if p is None or r is None: 387 | sort_func(A) 388 | else: 389 | sort_func(A, p, r) 390 | end_time = time.perf_counter() 391 | total_time += (end_time - start_time) 392 | return total_time / trials 393 | 394 | # def measure_running_time(sort_func, A, p=None, r=None): 395 | # start_time = time.time() 396 | # if p is None or r is None: 
397 | # sort_func(A) 398 | # else: 399 | # sort_func(A, p, r) 400 | # end_time = time.time() 401 | # return end_time - start_time 402 | 403 | if __name__ == "__main__": 404 | A = [] 405 | size = 0 406 | 407 | while True: 408 | try: 409 | n = input("Enter your number, Otherwise press SORT to sort or EXIT for termination: ") 410 | if n.upper() == "SORT": 411 | if size == 0: 412 | print("No numbers entered. Please enter numbers first.") 413 | continue 414 | 415 | A_normal = A.copy() 416 | A_randomized = A.copy() 417 | A_insertion = A.copy() 418 | A_merge = A.copy() 419 | A_bubble = A.copy() 420 | A_selection = A.copy() 421 | A_heap = A.copy() 422 | A_bottom_up = A.copy() 423 | A_counting = A.copy() 424 | A_radix = A.copy() 425 | A_cocktail_shaker = A.copy() 426 | A_pigeonhole = A.copy() 427 | A_flash = A.copy() 428 | 429 | quicksort_time = measure_running_time(quicksort, A_normal, 0, size - 1) 430 | print(f"Quicksort sorted array: {A_normal}") 431 | print(f"Quicksort running time: {quicksort_time:.6f} seconds") 432 | 433 | randomized_quicksort_time = measure_running_time(randomized_quicksort, A_randomized, 0, size - 1) 434 | print(f"Randomized Quicksort sorted array: {A_randomized}") 435 | print(f"Randomized Quicksort running time: {randomized_quicksort_time:.6f} seconds") 436 | 437 | insertion_sort_time = measure_running_time(insertion_sort, A_insertion) 438 | print(f"Insertion Sort sorted array: {A_insertion}") 439 | print(f"Insertion Sort running time: {insertion_sort_time:.6f} seconds") 440 | 441 | merge_sort_time = measure_running_time(merge_sort, A_merge) 442 | print(f"Merge Sort sorted array: {A_merge}") 443 | print(f"Merge Sort running time: {merge_sort_time:.6f} seconds") 444 | 445 | bubble_sort_time = measure_running_time(bubble_sort, A_bubble) 446 | print(f"Bubble Sort sorted array: {A_bubble}") 447 | print(f"Bubble Sort running time: {bubble_sort_time:.6f} seconds") 448 | 449 | selection_sort_time = measure_running_time(selection_sort, A_selection) 450 | 
print(f"Selection Sort sorted array: {A_selection}") 451 | print(f"Selection Sort running time: {selection_sort_time:.6f} seconds") 452 | 453 | heapsort_time = measure_running_time(heapsort, A_heap) 454 | print(f"Heapsort sorted array: {A_heap}") 455 | print(f"Heapsort running time: {heapsort_time:.6f} seconds") 456 | 457 | bottom_up_quicksort_time = measure_running_time(bottom_up_quicksort, A_bottom_up) 458 | print(f"Bottom-Up Quicksort sorted array: {A_bottom_up}") 459 | print(f"Bottom-Up Quicksort running time: {bottom_up_quicksort_time:.6f} seconds") 460 | 461 | counting_sort_time = measure_running_time(count_sort , A_counting) 462 | print(f"Counting-sort sorted array: {A_counting}") 463 | print(f"Counting-sort running time: {counting_sort_time:.6f} seconds") 464 | 465 | radix_sort_time = measure_running_time(radix_sort , A_radix) 466 | print(f"radix-sort sorted array: {A_radix}") 467 | print(f"radix-sort running time: {radix_sort_time:.6f} seconds") 468 | 469 | cocktail_shaker_sort_time = measure_running_time(cocktail_shaker_sort , A_cocktail_shaker) 470 | print(f"cocktail_shaker/shaker-sort sorted array: {A_cocktail_shaker}") 471 | print(f"cocktail_shaker/shaker-sort running time: {cocktail_shaker_sort_time:.6f} seconds") 472 | 473 | pigeonhole_sort_time = measure_running_time(pigeonhole_sort , A_pigeonhole) 474 | print(f"cocktail_shaker/shaker-sort sorted array: {A_pigeonhole}") 475 | print(f"cocktail_shaker/shaker-sort running time: {pigeonhole_sort_time:.6f} seconds") 476 | 477 | flash_sort_time = measure_running_time(flash_sort , A_flash) 478 | print(f"cocktail_shaker/shaker-sort sorted array: {A_flash}") 479 | print(f"cocktail_shaker/shaker-sort running time: {flash_sort_time:.6f} seconds") 480 | 481 | elif n.upper() == "EXIT": 482 | break 483 | else: 484 | n = int(n) 485 | size += 1 486 | A.append(n) 487 | except ValueError: 488 | print("Invalid input. 
Please enter a valid number or 'SORT'/'EXIT'.") -------------------------------------------------------------------------------- /Sorts_Algorithms/bucket_sort.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | def insertion_sort(A: list) -> list: 4 | """ 5 | Sorts a list using Insertion Sort. 6 | """ 7 | for i in range(1, len(A)): 8 | key = A[i] 9 | j = i - 1 10 | while j >= 0 and A[j] > key: 11 | A[j + 1] = A[j] 12 | j -= 1 13 | A[j + 1] = key 14 | return A 15 | 16 | 17 | def bucket_sort(A: list) -> list: 18 | """ 19 | Sorts an array of floating-point numbers using Bucket Sort. 20 | 21 | Args: 22 | A (List): Input array to be sorted. Must contain floating-point numbers in the range [0, 1). 23 | 24 | Returns: 25 | List: Sorted array. 26 | """ 27 | if not A: 28 | return A # Return empty list if input is empty 29 | 30 | # Input validation: Ensure all elements are floats in the range [0, 1) 31 | if not all(isinstance(num, float) and 0.0 <= num < 1.0 for num in A): 32 | raise ValueError("All elements in the input array must be floats in the range [0, 1).") 33 | 34 | n = len(A) 35 | buckets = [[] for _ in range(n)] # Create n empty buckets 36 | 37 | # Distribute elements into buckets 38 | for num in A: 39 | bucket_index = int(n * num) 40 | buckets[bucket_index].append(num) 41 | 42 | # Sort elements within each bucket (using insertion sort) 43 | for i in range(len(buckets)): 44 | buckets[i] = insertion_sort(buckets[i]) 45 | 46 | # Concatenate the sorted buckets 47 | sorted_A = [] 48 | for bucket in buckets: 49 | sorted_A.extend(bucket) 50 | 51 | return sorted_A 52 | 53 | def measure_running_time(sort_func, A): 54 | """ 55 | Measures the running time of a sorting function. 56 | 57 | Args: 58 | sort_func (function): The sorting function to measure. 59 | A (List): The input array to be sorted. 60 | 61 | Returns: 62 | float: The running time in seconds. 
63 | """ 64 | start_time = time.perf_counter() 65 | sort_func(A) 66 | end_time = time.perf_counter() 67 | return end_time - start_time 68 | 69 | 70 | if __name__ == "__main__": 71 | A = [] 72 | size = 0 73 | 74 | while True: 75 | try: 76 | n = input("Enter a floating-point number in the range [0, 1), or press SORT to sort or EXIT to terminate: ") 77 | if n.upper() == "SORT": 78 | if size == 0: 79 | print("No numbers entered. Please enter numbers first.") 80 | continue 81 | 82 | A_copy = A.copy() # Create a copy of the input array 83 | 84 | # Measure the running time of Bucket Sort 85 | bucket_sort_running_time = measure_running_time(bucket_sort, A_copy) 86 | 87 | # Display the sorted array and running time 88 | print(f"Bucket-sort sorted array: {bucket_sort(A_copy)}") 89 | print(f"Bucket-sort running time: {bucket_sort_running_time:.6f} seconds") 90 | 91 | elif n.upper() == "EXIT": 92 | break 93 | else: 94 | n = float(n) # Convert input to float 95 | if 0.0 <= n < 1.0: # Ensure the number is in the range [0, 1) 96 | size += 1 97 | A.append(n) 98 | else: 99 | print("Invalid input. Please enter a floating-point number in the range [0, 1).") 100 | except ValueError: 101 | print("Invalid input. 
Please enter a valid floating-point number or 'SORT'/'EXIT'.") -------------------------------------------------------------------------------- /Trees/AVLTree/AVLTree.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | 4 | def floats_are_equal(a, b, eps=1e-3): 5 | """Returns True if a and b are within eps of each other""" 6 | return abs(a - b) < eps 7 | 8 | 9 | class AVLNode: 10 | def __init__(self , key): 11 | self.key = key 12 | self.left = None 13 | self.right = None 14 | self.height = 1 15 | 16 | class AVLTree: 17 | """ 18 | Instatiates a AVL Tree 19 | Args: 20 | 'use_recursive'(Default = True): instantiates the tree that uses recursion for deletion and insertion 21 | """ 22 | def __init__(self , use_recursive = True): 23 | self.use_recursive = use_recursive 24 | 25 | def insert(self , root , key): 26 | if self.use_recursive: 27 | return self.insert_recursive(root , key) 28 | else: 29 | return self.insert_iterative(root , key) 30 | def delete(self , root , key): 31 | if self.use_recursive: 32 | return self.delete_recursive(root , key) 33 | else: 34 | return self.delete_iterative(root , key) 35 | def get_height(self , node): 36 | if not node: 37 | return 0 38 | return node.height 39 | 40 | def get_size(self , root): 41 | if not root: 42 | return 0 43 | return 1 + self.get_size(root.left) + self.get_size(root.right) 44 | 45 | def get_balance(self , node): 46 | if not node: 47 | return 0 48 | return self.get_height(node.left) - self.get_height(node.right) 49 | # Rotation is a bit different than the one used in Red-Black trees 50 | def left_rotate(self , z): 51 | y = z.right 52 | x = y.left 53 | y.left = z 54 | z.right = x 55 | # the height of z and y has changed 56 | z.height = 1 + max(self.get_height(z.left) , self.get_height(z.right)) 57 | y.height = 1 + max(self.get_height(y.left) , self.get_height(y.right)) 58 | return y 59 | 60 | def right_rotate(self , z): 61 | y = z.left 62 | x = 
y.right 63 | y.right = z 64 | z.left = x 65 | # the height of z and y has changed 66 | z.height = 1 + max(self.get_height(z.left) , self.get_height(z.right)) 67 | y.height = 1 + max(self.get_height(y.left) , self.get_height(y.right)) 68 | return y 69 | 70 | def search(self , root , key): 71 | if not root or floats_are_equal(root.key, key): 72 | return root 73 | if key < root.key: 74 | return self.search(root.left , key) 75 | 76 | return self.search(root.right , key) 77 | def insert_iterative(self, root: Optional[AVLNode], key: float) -> AVLNode: 78 | """ 79 | Iterative insertion in AVL tree with rebalancing. 80 | 81 | Args: 82 | root (AVLNode): Root of the AVL tree. 83 | key (float): Key to insert. 84 | 85 | Returns: 86 | AVLNode: New root of the AVL tree after insertion. 87 | """ 88 | if not root: 89 | return AVLNode(key) 90 | 91 | stack = [] 92 | node = root 93 | 94 | while node: 95 | stack.append(node) 96 | if key < node.key: 97 | if not node.left: 98 | node.left = AVLNode(key) 99 | break 100 | node = node.left 101 | elif key > node.key: 102 | if not node.right: 103 | node.right = AVLNode(key) 104 | break 105 | node = node.right 106 | else: 107 | return root 108 | 109 | while stack: 110 | current = stack.pop() 111 | current.height = 1 + max(self.get_height(current.left), self.get_height(current.right)) 112 | 113 | balance = self.get_balance(current) 114 | 115 | if balance > 1 and key < current.left.key: # Left Left 116 | if stack and stack[-1].left == current: 117 | stack[-1].left = self.right_rotate(current) 118 | elif stack: 119 | stack[-1].right = self.right_rotate(current) 120 | else: 121 | return self.right_rotate(current) 122 | 123 | 124 | if balance < -1 and key > current.right.key: # Right Right 125 | if stack and stack[-1].left == current: 126 | stack[-1].left = self.left_rotate(current) 127 | elif stack: 128 | stack[-1].right = self.left_rotate(current) 129 | else: 130 | return self.left_rotate(current) 131 | 132 | if balance > 1 and key > 
current.left.key: # Left Right 133 | current.left = self.left_rotate(current.left) 134 | if stack and stack[-1].left == current: 135 | stack[-1].left = self.right_rotate(current) 136 | elif stack: 137 | stack[-1].right = self.right_rotate(current) 138 | else: 139 | return self.right_rotate(current) 140 | 141 | if balance < -1 and key < current.right.key: # Right Left 142 | current.right = self.right_rotate(current.right) 143 | if stack and stack[-1].left == current: 144 | stack[-1].left = self.left_rotate(current) 145 | elif stack: 146 | stack[-1].right = self.left_rotate(current) 147 | else: 148 | return self.left_rotate(current) 149 | return root 150 | 151 | 152 | def delete_iterative(self, root, key): 153 | """ 154 | Iterative deletion in AVL tree with rebalancing. 155 | 156 | Args: 157 | root (AVLNode): Root of the AVL tree. 158 | key (float): Key to delete. 159 | 160 | Returns: 161 | AVLNode: New root of the AVL tree after deletion. 162 | """ 163 | if not root: 164 | return root 165 | 166 | stack = [] 167 | parent = None 168 | curr = root 169 | 170 | # Find the node to delete 171 | while curr and not floats_are_equal(curr.key , key): 172 | stack.append(curr) 173 | parent = curr 174 | if key < curr.key: 175 | curr = curr.left 176 | else: 177 | curr = curr.right 178 | 179 | if not curr: # Key not found 180 | return root 181 | 182 | # Case 1: Node has no children or one child 183 | if not curr.left or not curr.right: 184 | child = curr.left if curr.left else curr.right 185 | if not stack: 186 | return child # Deleting the root 187 | if stack[-1].left == curr: 188 | stack[-1].left = child 189 | else: 190 | stack[-1].right = child 191 | 192 | # Case 2: Node has two children 193 | else: 194 | # Find the in-order successor (leftmost node in the right subtree) 195 | succ_stack = [] 196 | succ = curr.right 197 | while succ.left: 198 | succ_stack.append(succ) 199 | succ = succ.left 200 | 201 | # Replace the value 202 | curr.key = succ.key 203 | 204 | # Remove the 
successor node 205 | if succ_stack: 206 | succ_stack[-1].left = succ.right 207 | else: 208 | curr.right = succ.right 209 | 210 | # Rebalance from the bottom up 211 | while stack: 212 | node = stack.pop() 213 | node.height = 1 + max(self.get_height(node.left), self.get_height(node.right)) 214 | balance_factor = self.get_balance(node) 215 | 216 | # Left-heavy subtree 217 | if balance_factor > 1: 218 | if self.get_balance(node.left) >= 0: # Left-Left case 219 | if stack: 220 | if stack[-1].left == node: 221 | stack[-1].left = self.right_rotate(node) 222 | else: 223 | stack[-1].right = self.right_rotate(node) 224 | else: 225 | root = self.right_rotate(node) 226 | else: # Left-Right case 227 | node.left = self.left_rotate(node.left) 228 | if stack: 229 | if stack[-1].left == node: 230 | stack[-1].left = self.right_rotate(node) 231 | else: 232 | stack[-1].right = self.right_rotate(node) 233 | else: 234 | root = self.right_rotate(node) 235 | 236 | # Right-heavy subtree 237 | if balance_factor < -1: 238 | if self.get_balance(node.right) <= 0: # Right-Right case 239 | if stack: 240 | if stack[-1].left == node: 241 | stack[-1].left = self.left_rotate(node) 242 | else: 243 | stack[-1].right = self.left_rotate(node) 244 | else: 245 | root = self.left_rotate(node) 246 | else: # Right-Left case 247 | node.right = self.right_rotate(node.right) 248 | if stack: 249 | if stack[-1].left == node: 250 | stack[-1].left = self.left_rotate(node) 251 | else: 252 | stack[-1].right = self.left_rotate(node) 253 | else: 254 | root = self.left_rotate(node) 255 | 256 | return root 257 | 258 | 259 | 260 | 261 | def insert_recursive(self, root: 'AVLNode | None', key: float) -> 'AVLNode | None': 262 | """ 263 | Recursively insert a key into the AVL tree. 264 | 265 | Args: 266 | root (AVLNode | None): The root of the subtree. 267 | key (float): The key to insert. 268 | 269 | Returns: 270 | AVLNode | None: The new root of the subtree. 271 | """ 272 | # 1. 
BST insert_recursiveion 273 | if not root: 274 | return AVLNode(key) 275 | elif key < root.key: 276 | root.left = self.insert_recursive(root.left , key) 277 | elif key > root.key: 278 | root.right = self.insert_recursive(root.right , key) 279 | else: 280 | return root # Key already exists, no need to insert 281 | # 2. update height 282 | root.height = 1 + max(self.get_height(root.left) , self.get_height(root.right)) 283 | 284 | # 3. balance factor 285 | balance_factor = self.get_balance(root) 286 | 287 | # 4. fix up operations if needed : similar functionality to the fix_insert_recursive in Red-Black trees 288 | # Left Left case: 289 | if balance_factor > 1 and key < root.left.key: 290 | return self.right_rotate(root) 291 | 292 | # Right Right case (dual of the previous case)): 293 | if balance_factor < -1 and key > root.right.key: 294 | return self.left_rotate(root) 295 | # Left Right case: 296 | if balance_factor > 1 and key > root.left.key: 297 | root.left = self.left_rotate(root.left) 298 | return self.right_rotate(root) 299 | # Right Left case (dual of the previous case): 300 | if balance_factor < - 1 and key < root.right.key: 301 | root.right = self.right_rotate(root.right) 302 | return self.left_rotate(root) 303 | return root 304 | def delete_recursive(self, root: 'AVLNode | None', key: float) -> 'AVLNode | None': 305 | """ 306 | Recursively deletes a key from the AVL tree. 307 | 308 | Args: 309 | root (AVLNode | None): The root of the subtree. 310 | key (float): The key to delete. 311 | 312 | Returns: 313 | AVLNode | None: The new root of the subtree. 
314 | """ 315 | if not root: 316 | return root 317 | # BST deletion 318 | if key < root.key: 319 | root.left = self.delete_recursive(root.left , key) 320 | elif key > root.key: 321 | root.right = self.delete_recursive(root.right , key) 322 | else: 323 | if not root.left: 324 | return root.right 325 | elif not root.right: 326 | return root.left 327 | # if the node has two non-null children then find the inorder succesor (similar to what we had in Red-Black trees) 328 | temp = self.get_min_val_node(root.right) 329 | root.key = temp.key 330 | root.right = self.delete_recursive(root.right , temp.key) 331 | if not root: # If node was deleted and subtree is now empty 332 | return None 333 | # update the height 334 | root.height = 1 + max(self.get_height(root.left), self.get_height(root.right)) 335 | 336 | balance_factor = self.get_balance(root) 337 | # Left Left case 338 | if balance_factor > 1 and self.get_balance(root.left) >= 0: 339 | return self.right_rotate(root) 340 | # Left Right case 341 | if balance_factor > 1 and self.get_balance(root.left) < 0: 342 | root.left = self.left_rotate(root.left) 343 | return self.right_rotate(root) 344 | # Right Right case 345 | if balance_factor < -1 and self.get_balance(root.right) <= 0: 346 | return self.left_rotate(root) 347 | # Right Left case 348 | if balance_factor< -1 and self.get_balance(root.right) > 0: 349 | root.right = self.right_rotate(root.right) 350 | return self.left_rotate(root) 351 | 352 | return root 353 | 354 | def get_min_val_node(self , node): 355 | curr = node 356 | while curr.left: 357 | curr = curr.left 358 | return curr 359 | # Traversals 360 | # for preorder traversal (root -> left -> right) 361 | def pre_order(self , root): 362 | if root: 363 | print(root.key , end = "") 364 | self.preorder(root.left) 365 | self.preorder(root.right) 366 | # for inorder traversal (left -> root -> right) 367 | def in_order(self , root): 368 | if root: 369 | self.in_order(root.left) 370 | print(root.key , end = "") 371 | 
self.in_order(root.right) 372 | # for postorder traversal (left -> right -> root) 373 | def post_order(self , root): 374 | if root: 375 | self.post_order(root.left) 376 | self.post_order(root.right) 377 | print(root.key , end = "") 378 | def morris_traversal(self , root): 379 | current = root 380 | while current: 381 | if not current.left: 382 | print(current.key , end = "") 383 | current = current.right 384 | else: 385 | pre = current.left 386 | while pre.right and pre.right != current: 387 | pre = pre.right 388 | if not pre.right: 389 | pre.right = current 390 | current = current.left 391 | else: 392 | pre.right = None 393 | print(current.key , end = "") 394 | current = current.right 395 | 396 | def get_max_val_node(self, node): 397 | """ 398 | Finds the node with the largest key in the subtree rooted at the given node. 399 | 400 | Args: 401 | node: The root of the subtree. 402 | Returns: 403 | The node with the largest key. 404 | """ 405 | curr = node 406 | while curr.right: 407 | curr = curr.right 408 | return curr 409 | 410 | def remove_right_most(self , root): 411 | """ 412 | Finds and removes the rightmost node from the subtree rooted at `root`. 413 | Returns: 414 | A tuple containing the updated tree and the removed node. 415 | """ 416 | 417 | if not root: 418 | return None , None 419 | if not root.right: 420 | return root.left , root 421 | parent = None 422 | curr = root 423 | while curr.right: 424 | parent = curr 425 | curr = curr.right 426 | 427 | if parent: 428 | parent.right = curr.left 429 | 430 | return root , curr 431 | 432 | 433 | def right_most_node(self , root): 434 | """ 435 | Finds the rightmost node from the subtree rooted at 'root'. 436 | 437 | 438 | Args: 439 | root (AVLNode): the root of the subtree 440 | """ 441 | while root.right and root: 442 | root = root.right 443 | return root 444 | def merge_join_based(self , root1 , root2): 445 | """ 446 | Merges two AVL trees using a join-based method. 
assumes keys in root 1 < key in root2 447 | Args: 448 | root1: The root node of the first AVL tree. 449 | root2: The root node of the second AVL tree. 450 | Returns: 451 | The root of the merged AVL tree. 452 | """ 453 | if not root1: 454 | return root2 455 | if not root2: 456 | return root1 457 | 458 | updated_root , removed_node = self.remove_right_most(root1) 459 | new_root = AVLNode(removed_node.key) 460 | new_root.left = updated_root 461 | new_root.right = root2 462 | 463 | return self.rebalance_from_node(new_root) 464 | 465 | 466 | def split_node(self , root , key): 467 | """Splits the subtree rooted at `root` based on the provided key 468 | 469 | Args: 470 | root (AVLNode): The root node of the subtree to split 471 | key : The key to split around 472 | Returns: 473 | A tuple with two nodes, the first one is the tree containing all elements smaller than the key, 474 | and the second one has all elements greater than the key. 475 | """ 476 | if not root: 477 | return None , None 478 | if key < root.key: 479 | left_tree , right_tree = self.split_node(root.left , key) 480 | root.left = right_tree 481 | return left_tree , self.rebalance_from_node(root) 482 | 483 | elif key > root.key: 484 | left_tree , right_tree = self.split_node(root.right , key) 485 | root.right = left_tree 486 | return self.rebalance_from_node(root) , right_tree 487 | else: 488 | return root.left , root.right 489 | 490 | def rebalance_from_node(self , node): 491 | """ 492 | Rebalances the AVL tree from the given node upwards 493 | 494 | Args: 495 | node (AVLNode): The node from which to start rebalancing. 
496 | 497 | Returns: 498 | The root of the balanced tree 499 | """ 500 | # Update the height of the current node 501 | node.height = 1 + max(self.get_height(node.left), self.get_height(node.right)) 502 | 503 | # Calculate the balance factor 504 | balance_factor = self.get_balance(node) 505 | 506 | # Left-heavy subtree 507 | if balance_factor > 1: 508 | if self.get_balance(node.left) >= 0: 509 | # Left-Left case 510 | return self.right_rotate(node) 511 | else: 512 | # Left-Right case 513 | node.left = self.left_rotate(node.left) 514 | return self.right_rotate(node) 515 | 516 | # Right-heavy subtree 517 | if balance_factor < -1: 518 | if self.get_balance(node.right) <= 0: 519 | # Right-Right case 520 | return self.left_rotate(node) 521 | else: 522 | # Right-Left case 523 | node.right = self.right_rotate(node.right) 524 | return self.left_rotate(node) 525 | 526 | # Return the (possibly updated) node 527 | return node 528 | 529 | 530 | 531 | def merge_split_based(self , root1 , root2) : 532 | """ Purpose: Implements the merge operation using a split-based approach. It splits the tree and then merges 533 | the resulting trees using the join based approach. 534 | Algorithm: 535 | Base Cases: Return the other node if one of the node is None. 536 | Find the maximum node of root1: Find the max of the first tree using get_max_val_node 537 | Split root2: Split the second tree using split_node based on the key of the maximum node of root1, obtaining the left and right subtrees of root2 (left_tree, right_tree). 538 | Merge Trees: Merge the first tree with left_tree using merge_join_based. Then merge the merged tree from the previous step with right_tree again using merge_join_based. 539 | 540 | Args: 541 | root1 (AVLTree): The root node of the first AVL tree. 542 | root2 (AVLTree): The root node of the second AVL tree. 
543 | 544 | Returns: 545 | root: The root node of the merged AVL tree 546 | """ 547 | if not root1: 548 | return root2 549 | if not root2: 550 | return root1 551 | max_val_node = self.get_max_val_node(root1) 552 | left_tree , right_tree = self.split_node(root2 , max_val_node.key) 553 | merged_tree = self.merge_join_based(root1 , left_tree) 554 | return self.merge_join_based(merged_tree ,right_tree) 555 | 556 | 557 | 558 | # if __name__ == "__main__": 559 | # avl = AVLTree() 560 | # root = None 561 | # elements = [10, 20, 30, 40, 50, 25] 562 | 563 | # for element in elements: 564 | # root = avl.insert_recursive(root, element) 565 | 566 | # print("Inorder Traversal (after insert_recursiveion):") 567 | # avl.in_order(root) 568 | # print("\n") 569 | 570 | # print("Preorder Traversal (after insert_recursiveion):") 571 | # avl.pre_order(root) 572 | # print("\n") 573 | 574 | # print("Postorder Traversal (after insert_recursiveion):") 575 | # avl.post_order(root) 576 | # print("\n") 577 | 578 | # print("Morris Traversal (after insert_recursiveion):") 579 | # avl.morris_traversal(root) 580 | # print("\n") 581 | 582 | # print("Size : ", avl.get_size(root)) 583 | # print("\n") 584 | 585 | # search_key = 30 586 | # search_result = avl.search(root, search_key) 587 | # if search_result: 588 | # print(f"Found node with key {search_key}") 589 | # else: 590 | # print(f"Node with key {search_key} not found") 591 | 592 | # delete_recursive_key = 25 593 | # root = avl.delete_recursive(root, delete_recursive_key) 594 | # print(f"\nInorder Traversal after deleting node with key {delete_recursive_key}:") 595 | # avl.in_order(root) 596 | # # print("\n") 597 | 598 | # print("Size : ", avl.get_size(root)) 599 | # print(AVLTree.insert_recursive.__doc__) # docstringreslove errors in the attached project, also add error and edge cases handeling to the AVL trees -------------------------------------------------------------------------------- /Trees/AVLTree/GUI.py: 
# -------------------- Trees/AVLTree/GUI.py --------------------
import tkinter as tk
from tkinter import messagebox
from math import cos, sin, radians
from AVLTree import AVLTree, AVLNode


class AVLTreeGUI:
    """Interactive Tkinter visualizer for an AVL tree.

    Lets the user insert and delete float keys (choosing between the tree's
    recursive and iterative implementations) and redraws the tree on an
    800x600 canvas after every successful operation.
    """

    def __init__(self):
        self.tree = AVLTree()
        self.tree.root = None
        self.window = tk.Tk()
        self.window.title("AVL Tree GUI")
        self.canvas = tk.Canvas(self.window, width=800, height=600, bg="white")
        self.canvas.pack()

        # Input controls
        self.control_frame = tk.Frame(self.window)
        self.control_frame.pack()

        # Insert controls
        self.insert_label = tk.Label(self.control_frame, text="Insert Key:")
        self.insert_label.grid(row=0, column=0)
        self.insert_entry = tk.Entry(self.control_frame)
        self.insert_entry.grid(row=0, column=1)
        self.insert_button = tk.Button(self.control_frame, text="Insert", command=self.insert_key)
        self.insert_button.grid(row=0, column=2)

        # Delete controls
        self.delete_label = tk.Label(self.control_frame, text="Delete Key:")
        self.delete_label.grid(row=1, column=0)
        self.delete_entry = tk.Entry(self.control_frame)
        self.delete_entry.grid(row=1, column=1)
        self.delete_button = tk.Button(self.control_frame, text="Delete", command=self.delete_key)
        self.delete_button.grid(row=1, column=2)

        # Method selection: recursive vs. iterative AVL implementations
        self.method_frame = tk.Frame(self.window)
        self.method_frame.pack()
        self.method_label = tk.Label(self.method_frame, text="Select Method:")
        self.method_label.grid(row=0, column=0)

        self.method_var = tk.StringVar(value="recursive")
        self.recursive_radio = tk.Radiobutton(self.method_frame, text="Recursive", variable=self.method_var, value="recursive")
        self.recursive_radio.grid(row=0, column=1)
        self.iterative_radio = tk.Radiobutton(self.method_frame, text="Iterative", variable=self.method_var, value="iterative")
        self.iterative_radio.grid(row=0, column=2)

    def insert_key(self):
        """Parse the insert entry and add the key via the selected method."""
        key_str = self.insert_entry.get().strip()  # Get and strip any extra spaces
        if not key_str:  # Check if the input is empty
            messagebox.showerror("Error", "Please enter a key.")
            return
        try:
            # BUG FIX: parse the already-read, stripped text instead of
            # re-reading the widget a second time (the validated value was
            # being discarded).
            key = float(key_str)
            method = self.method_var.get()
            if method == "recursive":
                self.tree.root = self.tree.insert_recursive(self.tree.root, key)
            else:
                self.tree.root = self.tree.insert_iterative(self.tree.root, key)
            self.insert_entry.delete(0, tk.END)
            self.draw_tree()
        except ValueError:
            messagebox.showerror("Error", "Please enter a valid number.")

    def delete_key(self):
        """Parse the delete entry and remove the key via the selected method."""
        key_str = self.delete_entry.get().strip()  # Get and strip any extra spaces
        if not key_str:  # Check if the input is empty
            messagebox.showerror("Error", "Please enter a key.")
            return
        try:
            # BUG FIX: parse the validated, stripped text (mirrors insert_key).
            key = float(key_str)
            method = self.method_var.get()
            if method == "recursive":
                self.tree.root = self.tree.delete_recursive(self.tree.root, key)
            else:
                self.tree.root = self.tree.delete_iterative(self.tree.root, key)
            self.delete_entry.delete(0, tk.END)
            self.draw_tree()
        except ValueError:
            messagebox.showerror("Error", "Please enter a valid number.")

    def draw_tree(self):
        """Clear the canvas and redraw the whole tree from the root."""
        self.canvas.delete("all")
        if self.tree.root:
            self._draw_tree(self.tree.root, 400, 50, 200)

    def _draw_tree(self, node, x, y, x_offset):
        """Recursively draw `node` at (x, y); children sit 60px lower, x_offset apart (halved per level)."""
        if node.left:
            self.canvas.create_line(x, y, x - x_offset, y + 60)
            self._draw_tree(node.left, x - x_offset, y + 60, x_offset // 2)
        if node.right:
            self.canvas.create_line(x, y, x + x_offset, y + 60)
            self._draw_tree(node.right, x + x_offset, y + 60, x_offset // 2)
        # Draw the node last so it overlaps the edge lines.
        self.canvas.create_oval(x - 15, y - 15, x + 15, y + 15, fill="lightblue")
        self.canvas.create_text(x, y, text=str(node.key))

    def run(self):
        """Start the Tk main loop (blocks until the window is closed)."""
        self.window.mainloop()
# -------------------- Trees/AVLTree/main.py --------------------
if __name__ == "__main__":
    # Imported lazily so that merely importing this module does not pull in
    # Tkinter; the GUI is only needed when this file is run as a script.
    from GUI import AVLTreeGUI

    app = AVLTreeGUI()
    app.run()


# -------------------- Trees/AVLTree/utils.py --------------------
def visualize_tree_by_text(root):
    """Return a rotated (right-subtree-first) text rendering of a binary tree.

    Args:
        root: Root node exposing ``key``, ``left`` and ``right`` attributes,
            or None/falsy for an empty tree.

    Returns:
        One line per node, indented 4 spaces per depth level; "" when empty.
    """
    if not root:
        return ""

    results = []

    def traverse(node, level=0):
        if node:
            traverse(node.right, level + 1)
            results.append(" " * 4 * level + f"{node.key}\n")
            # BUG FIX: the left subtree was never visited, so every key to
            # the left of a node was silently dropped from the rendering.
            traverse(node.left, level + 1)

    traverse(root)
    return "".join(results)


class AVLTreeError(Exception):
    """Base exception type for AVL-tree related errors."""
    pass


# -------------------- Trees/BTrees/BPlussTree.py --------------------
import argparse


class BPlusTree:
    """A B+ tree mapping unique integer keys to arbitrary values.

    Values live only in linked leaf nodes; internal nodes merely route
    searches. ``order`` bounds the number of keys/children per node.
    """

    def __init__(self, order):
        """Create an empty tree.

        Raises:
            ValueError: if order < 3 (a B+ tree needs at least order 3).
        """
        if order < 3:
            raise ValueError("Order must be at least 3 for B+ tree.")
        self.order = order
        self.root = LeafNode(order)

    def insert(self, key, value):
        """Insert a new key/value pair.

        Raises:
            TypeError: if key is not an integer.
            ValueError: if key is already present.
        """
        if not isinstance(key, int):
            raise TypeError("Key must be an integer.")
        if self.search(key) is not None:
            raise ValueError(f"Duplicate key {key}. Key must be unique for B+ tree.")
        self.root.insert(key, value)
        if self.root.is_full():
            # Root overflowed: grow the tree by one level.
            new_root = InternalNode(self.order)
            new_root.children.append(self.root)
            new_root.split(0, self.root)
            self.root = new_root

    def delete(self, key):
        """Delete an existing key.

        Raises:
            TypeError: if key is not an integer.
            ValueError: if key is not present.
        """
        if not isinstance(key, int):
            raise TypeError("Key must be an integer")
        # BUG FIX: `if not self.search(key)` rejected keys whose stored value
        # is falsy ("", 0, False); only absence (None) should raise.
        if self.search(key) is None:
            raise ValueError(f"Cannot delete {key}, it does not exist in the B+ tree.")
        self.root.delete(key)
        # Collapse the root when it degenerates to a single pass-through child.
        if isinstance(self.root, InternalNode) and len(self.root.children) == 1:
            self.root = self.root.children[0]

    def search(self, key):
        """Return the value stored under key, or None when absent.

        Raises:
            TypeError: if key is not an integer.
        """
        if not isinstance(key, int):
            raise TypeError("Key must be an integer")
        return self.root.search(key)

    def visualize(self):
        """Return an indented, text-only rendering of the tree structure."""
        def recurse(node, depth):
            if isinstance(node, LeafNode):
                return f"{' ' * depth}Leaf: {node.keys}\n"
            elif isinstance(node, InternalNode):
                result = f"{' ' * depth}Internal: {node.keys}\n"
                for child in node.children:
                    result += recurse(child, depth + 1)
                return result
        return recurse(self.root, 0)

    def visualize_graphical(self, filename="bplustree"):
        """Render the tree to <filename>.png via Graphviz and open the viewer."""
        # Imported lazily so the module remains importable (and the rest of the
        # tree usable) when the optional graphviz package is not installed.
        from graphviz import Digraph

        graph = Digraph("BPlusTree", format="png")
        graph.attr("node", shape="record")

        def add_node(node, node_id):
            if isinstance(node, LeafNode):
                label = f"Leaf | {{ {' | '.join(map(str, node.keys))} }}"
                graph.node(node_id, label)
            elif isinstance(node, InternalNode):
                label = f"Internal | {{ {' | '.join(map(str, node.keys))} }}"
                graph.node(node_id, label)
            return node_id

        def recurse(node, node_id):
            add_node(node, node_id)
            if isinstance(node, InternalNode):
                for i, child in enumerate(node.children):
                    child_id = f"{node_id}_{i}"
                    graph.edge(node_id, child_id)
                    recurse(child, child_id)

        recurse(self.root, "root")
        graph.render(filename, view=True)

    def __str__(self):
        return self.visualize()

class Node:
    """Base class for B+ tree nodes: stores the order and the sorted key list."""

    def __init__(self, order):
        self.order = order
        self.keys = []

    def is_full(self):
        # A node overflows (and must be split) once it holds `order` keys.
        return len(self.keys) >= self.order

    def is_underflow(self):
        return len(self.keys) < (self.order + 1) // 2


class InternalNode(Node):
    """Routing node: children[i] holds keys < keys[i]; children[-1] the rest."""

    def __init__(self, order):
        super().__init__(order)
        self.children = []

    def insert(self, key, value):
        """Route the insertion to the correct child, splitting it on overflow."""
        child = self.find_child(key)
        child.insert(key, value)
        if child.is_full():
            index = self.children.index(child)
            self.split(index, child)

    def split(self, index, child):
        """Split an overfull child in two and lift a separator key into self.

        BUG FIX: this method was invoked from ``insert`` above (and from
        ``BPlusTree.insert`` when the root overflows) but was never defined,
        so the very first node overflow raised AttributeError.

        Args:
            index: position of ``child`` within ``self.children``.
            child: the overfull child node (leaf or internal).
        """
        mid = len(child.keys) // 2
        if isinstance(child, LeafNode):
            # Leaf split: the separator key is *copied* upward, and the two
            # leaves stay chained through ``next`` for sequential scans.
            new_leaf = LeafNode(self.order)
            new_leaf.keys = child.keys[mid:]
            new_leaf.values = child.values[mid:]
            new_leaf.next = child.next
            child.keys = child.keys[:mid]
            child.values = child.values[:mid]
            child.next = new_leaf
            self.keys.insert(index, new_leaf.keys[0])
            self.children.insert(index + 1, new_leaf)
        else:
            # Internal split: the middle key *moves* up into this node.
            new_node = InternalNode(self.order)
            promoted = child.keys[mid]
            new_node.keys = child.keys[mid + 1:]
            new_node.children = child.children[mid + 1:]
            child.keys = child.keys[:mid]
            child.children = child.children[:mid + 1]
            self.keys.insert(index, promoted)
            self.children.insert(index + 1, new_node)

    def delete(self, key):
        """Route the deletion to the correct child, rebalancing on underflow."""
        child = self.find_child(key)
        child.delete(key)
        if child.is_underflow():
            index = self.children.index(child)
            self.rebalance(index)

    def rebalance(self, index):
        """Fix an underflowing child by borrowing from a sibling or merging."""
        child = self.children[index]
        if isinstance(child, LeafNode):
            # Handle underflow in leaf nodes
            if index > 0 and len(self.children[index - 1].keys) > (self.order - 1) // 2:
                child.borrow_from_left(self.children[index - 1])
            elif index < len(self.children) - 1 and len(self.children[index + 1].keys) > (self.order - 1) // 2:
                child.borrow_from_right(self.children[index + 1])
            else:
                # NOTE(review): `merge` below is written for internal children
                # (it appends the separator into the child's keys and touches
                # `sibling.children`); merging two *leaves* through it looks
                # unsafe — confirm against a full delete scenario.
                if index > 0:
                    self.merge(index - 1)
                else:
                    self.merge(index)
        else:
            # Internal node underflow
            if index > 0 and len(self.children[index - 1].keys) > (self.order - 1) // 2:
                self.borrow_left(index)
            elif index < len(self.children) - 1 and len(self.children[index + 1].keys) > (self.order - 1) // 2:
                self.borrow_right(index)
            else:
                if index > 0:
                    self.merge(index - 1)
                else:
                    self.merge(index)

    def merge(self, index):
        """Merge children[index] with its right sibling, pulling down the separator."""
        child = self.children[index]
        sibling = self.children[index + 1]
        # Move the separator key down into the surviving child
        child.keys.append(self.keys.pop(index))
        # Append sibling's keys and children to child
        child.keys.extend(sibling.keys)
        child.children.extend(sibling.children)
        # Remove sibling from children
        self.children.pop(index + 1)

    def borrow_left(self, index):
        """Rotate one key/child from the left sibling into children[index]."""
        child = self.children[index]
        left_sibling = self.children[index - 1]
        # Move the separator key from parent to child
        child.keys.insert(0, self.keys[index - 1])
        # Move the rightmost child of left_sibling to child
        child.children.insert(0, left_sibling.children.pop())
        # Update the separator key in parent
        self.keys[index - 1] = left_sibling.keys.pop()

    def borrow_right(self, index):
        """Rotate one key/child from the right sibling into children[index]."""
        child = self.children[index]
        right_sibling = self.children[index + 1]
        # Move the separator key from parent to child
        child.keys.append(self.keys[index])
        # Move the leftmost child of right_sibling to child
        child.children.append(right_sibling.children.pop(0))
        # Update the separator key in parent
        self.keys[index] = right_sibling.keys.pop(0)

    def find_child(self, key):
        """Return the child subtree responsible for `key`."""
        for i, k in enumerate(self.keys):
            if key < k:
                return self.children[i]
        return self.children[-1]

    def search(self, key):
        """Delegate the search to the responsible child."""
        return self.find_child(key).search(key)

    def __str__(self):
        return f"InternalNode(keys={self.keys}, children={[str(child) for child in self.children]})"


class LeafNode(Node):
    """Leaf node: parallel `keys`/`values` lists plus a `next`-leaf link."""

    def __init__(self, order):
        super().__init__(order)
        self.values = []
        self.next = None

    def insert(self, key, value):
        """Insert key/value at the sorted position within this leaf."""
        index = self.find_index(key)
        self.keys.insert(index, key)
        self.values.insert(index, value)

    def delete(self, key):
        """Remove `key` (and its value) from this leaf if present.

        BUG FIX: the original located the key with ``find_index``, which
        returns the insertion point *after* an equal key, so an existing key
        was never actually removed. Locate the exact position instead.
        """
        if key not in self.keys:
            return
        index = self.keys.index(key)
        self.keys.pop(index)
        self.values.pop(index)
        # Check for underflow and handle it
        if self.is_underflow():
            # Try borrowing from left sibling
            left_sibling = self.get_prev_leaf()
            if left_sibling and not left_sibling.is_underflow():
                self.borrow_from_left(left_sibling)
            else:
                # Try borrowing from right sibling
                right_sibling = self.get_next_leaf()
                if right_sibling and not right_sibling.is_underflow():
                    self.borrow_from_right(right_sibling)
                else:
                    # Merge with a sibling
                    if left_sibling:
                        left_sibling.merge_with_next(self)
                        # Update parent to remove the separator key
                        parent = self.find_parent()
                        if parent:
                            parent.keys.pop(parent.children.index(self))
                            parent.children.remove(self)
                    elif right_sibling:
                        self.merge_with_next(right_sibling)
                        # Update parent to remove the separator key
                        parent = self.find_parent()
                        if parent:
                            parent.keys.pop(parent.children.index(right_sibling))
                            parent.children.remove(right_sibling)

    def find_index(self, key):
        """Return the sorted insertion point for `key` within this leaf."""
        for i, k in enumerate(self.keys):
            if key < k:
                return i
        return len(self.keys)

    def search(self, key):
        """Return the value stored under `key` in this leaf, or None."""
        for i, k in enumerate(self.keys):
            if k == key:
                return self.values[i]
        return None

    def get_prev_leaf(self):
        # NOTE(review): this walks *forward* along `next` and cannot actually
        # reach a predecessor without parent links; for a non-circular chain
        # it returns the tail leaf (or self). Looks broken — confirm before
        # relying on any borrow/merge path that uses it.
        current = self
        while current.next:
            current = current.next
            if current.next == self:
                return None  # Circular linked list
        return current

    def get_next_leaf(self):
        """Return the next leaf in the chain (or None)."""
        return self.next

    def borrow_from_left(self, left_sibling):
        # Borrow a key from the left sibling
        borrowed_key = left_sibling.keys.pop()
        borrowed_value = left_sibling.values.pop()
        # Insert the borrowed key and value at the beginning of self
        self.keys.insert(0, borrowed_key)
        self.values.insert(0, borrowed_value)
        # Update parent node's key if necessary
        parent = self.find_parent()
        if parent:
            index = parent.children.index(self)
            parent.keys[index - 1] = left_sibling.keys[-1]

    def borrow_from_right(self, right_sibling):
        # Borrow a key from the right sibling
        borrowed_key = right_sibling.keys.pop(0)
        borrowed_value = right_sibling.values.pop(0)
        # Append the borrowed key and value to self
        self.keys.append(borrowed_key)
        self.values.append(borrowed_value)
        # Update parent node's key if necessary
        parent = self.find_parent()
        if parent:
            index = parent.children.index(self)
            parent.keys[index] = right_sibling.keys[0]

    def merge_with_next(self, next_leaf):
        # Merge self with the next leaf
        self.keys.extend(next_leaf.keys)
        self.values.extend(next_leaf.values)
        self.next = next_leaf.next
        # Remove next_leaf from the tree
        # Update parent node to remove the separator key
        parent = self.find_parent()
        if parent:
            parent.keys.pop(parent.children.index(next_leaf))
            parent.children.remove(next_leaf)

    def find_parent(self):
        # NOTE(review): stub — nodes carry no parent references, so this
        # always returns None and every parent update guarded by `if parent:`
        # is currently a no-op. Needs parent links or a root-down search.
        pass  # Implementation depends on how parent references are managed

    def is_underflow(self):
        # Leaves tolerate fewer keys than internal nodes before underflowing.
        return len(self.keys) < (self.order - 1) // 2

    def __str__(self):
        return f"LeafNode(keys={self.keys}, values={self.values}, next={'next leaf' if self.next else 'None'})"


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="B+ Tree CLI")
    parser.add_argument("--order", type=int, default=4, help="Order of the B+ Tree")
    args = parser.parse_args()

    bptree = BPlusTree(order=args.order)

    while True:
        print("\nChoose an operation:")
        print("1. Insert key and value")
        print("2. Delete key")
        print("3. Search key")
        print("4. Visualize tree")
        print("5. Visualize tree graphically")
        print("6. Exit")

        choice = input("Enter your choice: ")

        try:
            if choice == "1":
                key = int(input("Enter key: "))
                value = input("Enter value: ")
                bptree.insert(key, value)
                print(f"Inserted key {key} with value '{value}'")

            elif choice == "2":
                key = int(input("Enter key to delete: "))
                bptree.delete(key)
                print(f"Deleted key {key}")

            elif choice == "3":
                key = int(input("Enter key to search: "))
                result = bptree.search(key)
                if result is not None:
                    print(f"Key {key} found with value '{result}'")
                else:
                    print(f"Key {key} not found")

            elif choice == "4":
                print("\nTree Visualization:")
                print(bptree.visualize())

            elif choice == "5":
                filename = input("Enter filename for graphical visualization (default: bplustree): ") or "bplustree"
                print("Generating graphical visualization...")
                bptree.visualize_graphical(filename)

            elif choice == "6":
                print("Exiting...")
                break

            else:
                print("Invalid choice. Please try again.")

        except (ValueError, TypeError) as e:
            print(f"Error: {e}")

# -------- /Trees/RedBlackTree/README.md (lines 1-7, preserved; document continues below) --------
# # Red-Black Tree with Visualization and Command Interface
#
# ## Overview
# This project provides an implementation of a **Red-Black Tree** in Python with visualization capabilities and an interactive command interface. A Red-Black Tree is a balanced binary search tree that ensures efficient operations while maintaining specific color and structural properties. This implementation includes:
#
# - Red-Black Tree operations such as **insert**, **delete**, and **search**.
# - Tree visualization using **Graphviz**, with visual outputs saved as PNG files in a timestamped directory.
8 | - An interactive console interface for performing operations on the tree. 9 | 10 | ## Features 11 | 12 | ### Core Functionality 13 | - **Red-Black Tree Operations**: 14 | - **Insertion**: Adds a node to the tree while maintaining Red-Black Tree properties. 15 | - **Deletion**: Removes a node and rebalances the tree if necessary. 16 | - **Search**: Locates nodes based on their values. 17 | 18 | ### Visualization 19 | - Automatically generates a graphical representation of the tree after each insertion or deletion. 20 | - Node colors are represented accurately (red or black). 21 | - Visualizations are saved as PNG files in a uniquely timestamped directory. 22 | 23 | ### Command Interface 24 | - **Insert Nodes**: Add nodes interactively using the `insert <value>` command. 25 | - **Delete Nodes**: Remove nodes interactively using the `delete <value>` command. 26 | - **Exit Program**: End the session with the `exit` command. 27 | 28 | ## Getting Started 29 | 30 | ### Prerequisites 31 | - Python 3.8 or higher 32 | - Required Python libraries: 33 | - `graphviz` 34 | - `os` 35 | - `uuid` 36 | - `datetime` 37 | 38 | To install Graphviz, follow the steps based on your operating system: 39 | 40 | ### 1. **Installing Graphviz on Windows:** 41 | 42 | - **Using the Installer:** 43 | 1. Go to the [Graphviz download page](https://graphviz.gitlab.io/download/). 44 | 2. Download the Windows installer (e.g., `.exe` file). 45 | 3. Run the installer and follow the on-screen instructions. 46 | 4. Add Graphviz to the system `PATH` environment variable (you can do this during installation, or manually afterward). 47 | 48 | - **Using `choco` (if you have Chocolatey):** 49 | ```bash 50 | choco install graphviz 51 | ``` 52 | 53 | ### 2. **Installing Graphviz on macOS:** 54 | 55 | - **Using Homebrew:** 56 | ```bash 57 | brew install graphviz 58 | ``` 59 | 60 | - **Using MacPorts:** 61 | ```bash 62 | sudo port install graphviz 63 | ``` 64 | 65 | ### 3.
**Installing Graphviz on Linux:** 66 | 67 | - **Ubuntu/Debian:** 68 | ```bash 69 | sudo apt-get install graphviz 70 | ``` 71 | 72 | - **Fedora:** 73 | ```bash 74 | sudo dnf install graphviz 75 | ``` 76 | 77 | - **Arch Linux:** 78 | ```bash 79 | sudo pacman -S graphviz 80 | ``` 81 | 82 | ### 4. **Installing the Python `graphviz` Package (for Python bindings):** 83 | 84 | If you also need the Python bindings for Graphviz (useful for creating and rendering graphs directly in Python), you can install them using `pip`: 85 | 86 | ```bash 87 | pip install graphviz 88 | ``` 89 | 90 | After installation, you can use the Graphviz functions in Python to create and visualize graphs. Make sure Graphviz is installed on your system for the Python bindings to work properly. 91 | 92 | ### Running the Program 93 | 1. Save the script as `red_black_tree_visual.py`. 94 | 2. Execute the script: 95 | ```bash 96 | python red_black_tree_visual.py 97 | ``` 98 | 3. Interact with the tree using the commands described below. 99 | 100 | ### Interactive Commands 101 | - **Insert a node**: 102 | ``` 103 | insert 104 | ``` 105 | Example: 106 | ``` 107 | insert 10 108 | insert -5 109 | ``` 110 | 111 | - **Delete a node**: 112 | ``` 113 | delete 114 | ``` 115 | Example: 116 | ``` 117 | delete 10 118 | delete -5 119 | ``` 120 | 121 | - **Exit the program**: 122 | ``` 123 | exit 124 | ``` 125 | 126 | ### Example Usage 127 | ```plaintext 128 | Enter command (insert , delete , or exit): insert 10 129 | Inserting 10 130 | Tree visualization saved as ./Red_Black_20241225_123456/rb_tree_.png 131 | 132 | Enter command (insert , delete , or exit): delete 10 133 | Deleting 10 134 | Tree visualization saved as ./Red_Black_20241225_123456/rb_tree_.png 135 | 136 | Enter command (insert , delete , or exit): exit 137 | Exiting program. 138 | ``` 139 | 140 | ## File Structure 141 | - **`red_black_tree_visual.py`**: Main script containing the Red-Black Tree implementation and visualization functionality. 
142 | - **`Red_Black_`**: Folder containing visualized PNG files of the tree structure for each operation. 143 | 144 | ## Implementation Details 145 | 146 | ### Core Components 147 | 1. **Node Class**: 148 | - Represents each node in the Red-Black Tree. 149 | - Stores data, color, and references to left, right, and parent nodes. 150 | 151 | 2. **RedBlackTree Class**: 152 | - Implements Red-Black Tree operations such as insertion, deletion, and search. 153 | - Handles rebalancing and rotations to maintain tree properties. 154 | 155 | 3. **VisualRedBlackTree Class**: 156 | - Extends the `RedBlackTree` class with visualization capabilities using **Graphviz**. 157 | - Creates a timestamped directory to store visualizations of the tree. 158 | 159 | ## Examples 160 | ### Insertion Example 161 | ```plaintext 162 | Inserting 10 163 | Tree visualization saved as ./Red_Black_20241225_123456/rb_tree_.png 164 | ``` 165 | 166 | ### Deletion Example 167 | ```plaintext 168 | Deleting 10 169 | Tree visualization saved as ./Red_Black_20241225_123456/rb_tree_.png 170 | ``` 171 | 172 | ## Acknowledgments 173 | - **Graphviz**: For generating the visualizations. 174 | - **UUID and Datetime**: For unique filenames and folder naming conventions. 175 | 176 | ## License 177 | This project is licensed under the MIT License. See `LICENSE` for details. 178 | 179 | --- 180 | Feel free to extend the implementation with additional features such as tree traversal visualizations, performance metrics, custom visualization styles, or any other related DSA data algorithm/dataStructures. 
# Trees/RedBlackTree/RedBlackTree.py

# graphviz is a third-party package used only by VisualRedBlackTree below;
# guard the import so the core tree stays usable when it is not installed.
try:
    from graphviz import Digraph
except ImportError:  # visualization is optional; core RedBlackTree needs no graphviz
    Digraph = None
import os
import uuid
from datetime import datetime


def floats_are_equal(a, b, eps=1e-3):
    """Return True when a and b differ by less than eps (absolute tolerance)."""
    return abs(a - b) < eps


class Node:
    """A single red-black tree node.

    New nodes start RED, matching the CLRS insertion scheme where the
    insert-fixup restores the red-black properties afterwards.
    """

    def __init__(self, data):
        self.data = data
        self.color = 'RED'  # default color for freshly inserted nodes
        self.left = None
        self.right = None
        self.parent = None


class RedBlackTree:
    """CLRS-style red-black tree keyed by numbers.

    A single shared BLACK sentinel (NIL_LEAF) stands in for every external
    leaf, which lets the fixup routines read colors/parents uniformly.
    Key matching in delete() and search() uses floats_are_equal(), i.e. an
    absolute tolerance of 1e-3, so float keys behave consistently.
    """

    def __init__(self):
        self.NIL_LEAF = Node(None)  # sentinel NIL leaf node, always BLACK
        self.NIL_LEAF.color = 'BLACK'
        self.root = self.NIL_LEAF

    def _transplant(self, u, v):
        """Replace subtree rooted at u with subtree rooted at v (CLRS TRANSPLANT)."""
        if u.parent is None:
            self.root = v
        elif u == u.parent.left:
            u.parent.left = v
        else:
            u.parent.right = v
        # Unconditional, as in CLRS: v may be the NIL sentinel.
        v.parent = u.parent

    def _minimum(self, node):
        """Return the leftmost (minimum-key) node of the subtree at node."""
        while node.left != self.NIL_LEAF:
            node = node.left
        return node

    def _left_rotate(self, x):
        """Rotate left around x; x's right child y takes x's place."""
        y = x.right
        x.right = y.left
        if y.left != self.NIL_LEAF:
            y.left.parent = x
        y.parent = x.parent
        if x.parent is None:
            self.root = y
        elif x == x.parent.left:
            x.parent.left = y
        else:
            x.parent.right = y
        y.left = x
        x.parent = y

    def _right_rotate(self, y):
        """Rotate right around y; y's left child x takes y's place (mirror of left rotate)."""
        x = y.left
        y.left = x.right
        if x.right != self.NIL_LEAF:
            x.right.parent = y
        x.parent = y.parent
        if y.parent is None:
            self.root = x
        elif y == y.parent.left:
            y.parent.left = x
        else:
            y.parent.right = x
        x.right = y
        y.parent = x

    def _fix_insert(self, k):
        """Restore red-black properties after inserting RED node k (CLRS RB-INSERT-FIXUP)."""
        while k.parent and k.parent.color == 'RED':
            if k.parent == k.parent.parent.left:
                u = k.parent.parent.right  # uncle
                if u.color == 'RED':
                    # Case 1: red uncle -> recolor and move the violation up.
                    k.parent.color = 'BLACK'
                    u.color = 'BLACK'
                    k.parent.parent.color = 'RED'
                    k = k.parent.parent
                else:
                    if k == k.parent.right:
                        # Case 2: inner child -> rotate into case 3.
                        k = k.parent
                        self._left_rotate(k)
                    # Case 3: recolor and rotate the grandparent.
                    k.parent.color = 'BLACK'
                    k.parent.parent.color = 'RED'
                    self._right_rotate(k.parent.parent)
            else:
                # Symmetric to the branch above with left/right exchanged.
                u = k.parent.parent.left
                if u.color == 'RED':
                    k.parent.color = 'BLACK'
                    u.color = 'BLACK'
                    k.parent.parent.color = 'RED'
                    k = k.parent.parent
                else:
                    if k == k.parent.left:
                        k = k.parent
                        self._right_rotate(k)
                    k.parent.color = 'BLACK'
                    k.parent.parent.color = 'RED'
                    self._left_rotate(k.parent.parent)
        self.root.color = 'BLACK'

    def insert(self, data):
        """Insert data as a new key; duplicates (>=) are placed in the right subtree."""
        node = Node(data)
        node.left = self.NIL_LEAF
        node.right = self.NIL_LEAF
        if self.root == self.NIL_LEAF:
            self.root = node
            node.color = 'BLACK'  # the root is always black
        else:
            current = self.root
            while current != self.NIL_LEAF:
                parent = current
                if node.data < current.data:
                    current = current.left
                else:
                    current = current.right
            node.parent = parent
            if node.data < parent.data:
                parent.left = node
            else:
                parent.right = node
            self._fix_insert(node)

    def _fix_delete(self, x):
        """Restore red-black properties after removing a black node (CLRS RB-DELETE-FIXUP).

        x carries an extra "black" that is pushed up or resolved by
        recoloring/rotations; x may be the NIL sentinel (its parent was set
        by _transplant).
        """
        while x != self.root and x.color == 'BLACK':
            if x == x.parent.left:
                s = x.parent.right  # sibling
                if s.color == 'RED':
                    # Case 1: red sibling -> rotate to get a black sibling.
                    s.color = 'BLACK'
                    x.parent.color = 'RED'
                    self._left_rotate(x.parent)
                    s = x.parent.right
                if s.left.color == 'BLACK' and s.right.color == 'BLACK':
                    # Case 2: sibling's children both black -> push blackness up.
                    s.color = 'RED'
                    x = x.parent
                else:
                    if s.right.color == 'BLACK':
                        # Case 3: near child red -> rotate into case 4.
                        s.left.color = 'BLACK'
                        s.color = 'RED'
                        self._right_rotate(s)
                        s = x.parent.right
                    # Case 4: far child red -> recolor, rotate, done.
                    s.color = x.parent.color
                    x.parent.color = 'BLACK'
                    s.right.color = 'BLACK'
                    self._left_rotate(x.parent)
                    x = self.root
            else:
                # Symmetric to the branch above with left/right exchanged.
                s = x.parent.left
                if s.color == 'RED':
                    s.color = 'BLACK'
                    x.parent.color = 'RED'
                    self._right_rotate(x.parent)
                    s = x.parent.left
                if s.left.color == 'BLACK' and s.right.color == 'BLACK':
                    s.color = 'RED'
                    x = x.parent
                else:
                    if s.left.color == 'BLACK':
                        s.right.color = 'BLACK'
                        s.color = 'RED'
                        self._left_rotate(s)
                        s = x.parent.left
                    s.color = x.parent.color
                    x.parent.color = 'BLACK'
                    s.left.color = 'BLACK'
                    self._right_rotate(x.parent)
                    x = self.root
        x.color = 'BLACK'

    def delete(self, data):
        """Delete one node whose key is within eps of data (CLRS RB-DELETE).

        The descent keeps going right on ties, so of several eps-equal keys
        the rightmost match found on the search path is removed. Prints a
        message and returns when no key matches.
        """
        node = self.root
        z = self.NIL_LEAF
        while node != self.NIL_LEAF:
            if floats_are_equal(node.data, data):
                z = node
            if node.data <= data:
                node = node.right
            else:
                node = node.left
        if z == self.NIL_LEAF:
            print("Couldn't find key in the tree")
            return
        y = z
        y_original_color = y.color
        if z.left == self.NIL_LEAF:
            x = z.right
            self._transplant(z, z.right)
        elif z.right == self.NIL_LEAF:
            x = z.left
            self._transplant(z, z.left)
        else:
            # Two children: splice in z's in-order successor y.
            y = self._minimum(z.right)
            y_original_color = y.color
            x = y.right
            if y.parent == z:
                x.parent = y  # x may be NIL; record where the extra black lives
            else:
                self._transplant(y, y.right)
                y.right = z.right
                y.right.parent = y
            self._transplant(z, y)
            y.left = z.left
            y.left.parent = y
            y.color = z.color
        if y_original_color == 'BLACK':
            self._fix_delete(x)

    def search(self, data):
        """Return the first node whose key matches data, or None.

        Uses the same floats_are_equal() tolerance as delete(): previously
        search compared with exact equality, so a float key that delete()
        would happily remove could never be found here.
        """
        current = self.root
        while current != self.NIL_LEAF and not floats_are_equal(current.data, data):
            if data < current.data:
                current = current.left
            else:
                current = current.right
        return current if current != self.NIL_LEAF else None


class VisualRedBlackTree(RedBlackTree):
    """RedBlackTree that renders a Graphviz PNG snapshot after every mutation."""
226 | super().__init__() 227 | self.dot = Digraph(comment="Red-Black Tree") 228 | self.output_folder = self._create_output_folder() 229 | 230 | def _create_output_folder(self): 231 | current_dir = os.path.dirname(os.path.abspath(__file__)) 232 | timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") 233 | folder_name = f"Red_Black_{timestamp}" 234 | output_path = os.path.join(current_dir, folder_name) 235 | os.makedirs(output_path, exist_ok=True) 236 | return output_path 237 | 238 | def visualize_tree(self): 239 | self.dot = Digraph(comment="Red-Black Tree") # Reset graph 240 | self._add_nodes_edges(self.root) 241 | file_name = f"rb_tree_{uuid.uuid4()}.png" 242 | file_path = os.path.join(self.output_folder, file_name) 243 | self.dot.render(file_path, format="png", cleanup=True) 244 | print(f"Tree visualization saved as {file_path}") 245 | 246 | def _add_nodes_edges(self, node): 247 | if node == self.NIL_LEAF: 248 | return 249 | 250 | color = "black" if node.color == "BLACK" else "red" 251 | self.dot.node(str(id(node)), label=str(node.data), color=color, fontcolor=color) 252 | 253 | if node.left != self.NIL_LEAF: 254 | self.dot.edge(str(id(node)), str(id(node.left)), label="L") 255 | self._add_nodes_edges(node.left) 256 | 257 | if node.right != self.NIL_LEAF: 258 | self.dot.edge(str(id(node)), str(id(node.right)), label="R") 259 | self._add_nodes_edges(node.right) 260 | 261 | def insert(self, data): 262 | print(f"Inserting {data}") 263 | super().insert(data) 264 | self.visualize_tree() 265 | 266 | def delete(self , data): 267 | print(f"deleting {data}") 268 | super().delete(data) 269 | self.visualize_tree() 270 | if __name__ == "__main__": 271 | rbt = VisualRedBlackTree() 272 | 273 | while True: 274 | command_str = input("Enter command (insert , delete , or exit): ").strip().lower() 275 | parts = command_str.split() 276 | 277 | if not parts: 278 | continue # Handle empty input 279 | 280 | command = parts[0] 281 | 282 | if command == "insert": 283 | if len(parts) == 2: 
284 | try: 285 | num = float(parts[1]) 286 | rbt.insert(num) 287 | except ValueError: 288 | print("Invalid number format. Please enter a valid integer or int.") 289 | else: 290 | print("Invalid insert command. Usage: insert ") 291 | elif command == "delete": 292 | if len(parts) == 2: 293 | try: 294 | num = float(parts[1]) 295 | rbt.delete(num) 296 | except ValueError: 297 | print("Invalid number format. Please enter a valid integer or int.") 298 | else: 299 | print("Invalid delete command. Usage: delete ") 300 | elif command == "exit": 301 | print("Exiting program.") 302 | break 303 | else: 304 | print("Invalid command. Please use 'insert', 'delete', or 'exit'.") 305 | """ 306 | 307 | # debugging 308 | # Example Usage 309 | if __name__ == "__main__": 310 | rbt = VisualRedBlackTree() 311 | 312 | rbt.insert(8453) 313 | rbt.insert(4553) 314 | rbt.insert(453) 315 | rbt.insert(453) 316 | rbt.insert(843) 317 | rbt.insert(-12) 318 | rbt.insert(1) 319 | rbt.insert(-1) 320 | rbt.insert(-20) 321 | rbt.insert(25) 322 | rbt.insert(5) 323 | rbt.insert(15) # Automatically visualizes the updated tree 324 | rbt.delete(-12) # Automatically visualizes the updated tree 325 | rbt.delete(453) 326 | rbt.delete(453) 327 | rbt.delete(4553) 328 | rbt.delete(8453) 329 | rbt.insert(4) 330 | rbt.insert(-4) 331 | rbt.insert(6) 332 | rbt.insert(-8) 333 | rbt.insert(-13) 334 | rbt.insert(25) 335 | rbt.insert(5) 336 | rbt.delete(5) 337 | rbt.delete(-13) 338 | """ -------------------------------------------------------------------------------- /pi_estimation.py: -------------------------------------------------------------------------------- 1 | import random 2 | import numpy as np 3 | import matplotlib.pyplot as plt 4 | 5 | 6 | "Estimating the value of Pi using the formula pi / 4 == circle / square" 7 | 8 | def estimate_pi(num_samples): 9 | """Estimates the value of pi using the Monte Carlo method.""" 10 | 11 | points_inside_circle = 0 12 | total_points = 0 13 | x_inside = [] 14 | y_inside = [] 
15 | x_outside = [] 16 | y_outside = [] 17 | 18 | for _ in range(num_samples): 19 | # Generate random x and y coordinates within a square (side length 2) 20 | x = random.uniform(-1, 1) 21 | y = random.uniform(-1, 1) 22 | 23 | # Calculate the distance from the origin 24 | distance = x**2 + y**2 25 | 26 | # Check if the point is inside the unit circle (radius 1) 27 | if distance <= 1: 28 | points_inside_circle += 1 29 | x_inside.append(x) 30 | y_inside.append(y) 31 | else: 32 | x_outside.append(x) 33 | y_outside.append(y) 34 | 35 | total_points += 1 36 | 37 | # Estimate pi: (Area of Circle) / (Area of Square) = pi/4 38 | pi_estimate = 4 * (points_inside_circle / total_points) 39 | return pi_estimate, x_inside, y_inside, x_outside, y_outside 40 | 41 | # Run the simulation 42 | num_samples = 10000 43 | pi_estimate, x_inside, y_inside, x_outside, y_outside = estimate_pi(num_samples) 44 | 45 | print(f"Estimated value of pi with {num_samples} samples: {pi_estimate}") 46 | 47 | # Visualization 48 | plt.figure(figsize=(6, 6)) 49 | plt.scatter(x_inside, y_inside, color='blue', s=1, label='Inside Circle') 50 | plt.scatter(x_outside, y_outside, color='red', s=1, label='Outside Circle') 51 | 52 | # Draw the circle 53 | circle = plt.Circle((0, 0), 1, color='green', fill=False) 54 | plt.gca().add_patch(circle) 55 | 56 | plt.title(f'Monte Carlo Estimation of Pi (N={num_samples})') 57 | plt.xlabel('x') 58 | plt.ylabel('y') 59 | plt.xlim(-1.1, 1.1) 60 | plt.ylim(-1.1, 1.1) 61 | plt.gca().set_aspect('equal', adjustable='box') # Ensure circle looks like a circle 62 | plt.legend() 63 | plt.show() 64 | 65 | # Convergence analysis 66 | sample_sizes = [100, 1000, 10000, 100000, 1000000] 67 | pi_estimates = [] 68 | for n in sample_sizes: 69 | pi_estimates.append(estimate_pi(n)[0]) 70 | 71 | plt.figure(figsize=(8, 6)) 72 | plt.plot(sample_sizes, pi_estimates, marker='o') 73 | plt.axhline(y=np.pi, color='r', linestyle='--', label='True Pi') 74 | plt.xscale('log') 75 | plt.title('Convergence 
of Pi Estimate with Increasing Sample Size') 76 | plt.xlabel('Number of Samples (log scale)') 77 | plt.ylabel('Estimated Pi') 78 | plt.legend() 79 | plt.grid(True) 80 | plt.show() --------------------------------------------------------------------------------