├── .gitignore ├── README.md ├── cover.png └── src ├── lesson02_custom_function.py ├── lesson02_extend_process.py ├── lesson03_active_children.py ├── lesson03_current_process.py ├── lesson03_daemon.py ├── lesson03_exitcode.py ├── lesson03_isalive.py ├── lesson03_name.py ├── lesson03_parent.py ├── lesson03_pid.py ├── lesson04_barrier.py ├── lesson04_condition.py ├── lesson04_event.py ├── lesson04_lock.py ├── lesson04_semaphore.py ├── lesson05_ctypes.py ├── lesson05_inherited.py ├── lesson05_pipe.py ├── lesson05_queue.py ├── lesson06_apply_async.py ├── lesson06_callback.py ├── lesson06_map.py ├── lesson07_manager_list.py └── lesson07_manager_semaphore.py /.gitignore: -------------------------------------------------------------------------------- 1 | # git ignore 2 | **/.DS_Store 3 | .DS_Store 4 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Python Multiprocessing Jump-Start 2 | 3 | ![Python Multiprocessing Jump-Start](cover.png) 4 | 5 | * 6 | 7 | This repository provides all source code for the book: 8 | 9 | * **Python Multiprocessing Jump-Start**: _Develop Parallel Programs, Side-Step the GIL, and Use All CPU Cores_, Jason Brownlee, 2022. 10 | 11 | 12 | ## Source Code 13 | You can access all Python .py files directly here: 14 | 15 | * [src/](src/) 16 | 17 | 18 | 19 | ## Get the Book 20 | 21 | You can learn more about the book here: 22 | 23 | * [Gumroad](https://superfastpython.gumroad.com/l/pmj) 24 | * [Amazon](https://amzn.to/3Q4bNL0) 25 | * [GooglePlay](https://play.google.com/store/books/details?id=8yR-EAAAQBAJ) 26 | * [GoogleBooks](https://books.google.com.au/books/about?id=8yR-EAAAQBAJ) 27 | * [Goodreads](https://www.goodreads.com/book/show/61767717-python-multiprocessing-jump-start) 28 | 29 | 30 | ### Book Blurb 31 | 32 | > Unlock parallel Python programming (and run your code on all CPUs). 
33 | > 34 | > The multiprocessing module provides easy-to-use process-based concurrency in Python. 35 | > 36 | > Unlike Python threading, multiprocessing side-steps the infamous Global Interpreter Lock (GIL), allowing full parallelism in Python. 37 | > 38 | > This is not some random third-party library, this is an API provided in the Python standard library (already installed on your system). 39 | > 40 | > This is the API you need to use to make your code run faster. 41 | > 42 | > There's just one problem. Few developers know about it (or how to use it well). 43 | > 44 | > Introducing: "Python Multiprocessing Jump-Start". A new book designed to teach you the multiprocessing module in Python, super fast! 45 | > 46 | > You will get a fast-paced, 7-part course to get you started and make you awesome at using the multiprocessing API. 47 | > 48 | > Each of the 7 lessons was carefully designed to teach one critical aspect of the multiprocessing module, with explanations, code snippets and worked examples. 49 | > 50 | > Each lesson ends with an exercise for you to complete to confirm you understand the topic, a summary of what was learned, and links for further reading if you want to go deeper. 51 | > 52 | > Stop copy-pasting code from StackOverflow answers. 53 | > 54 | > Learn Python concurrency correctly, step-by-step. 
# (repository dump continues)
# /cover.png: binary image asset; hosted at
# https://raw.githubusercontent.com/SuperFastPython/PythonMultiprocessingJumpStart/10d3992bf280fd1e7923379abf5ba64bf4b7cdce/cover.png

# ============================================================
# src/lesson02_custom_function.py
# ============================================================
# SuperFastPython.com
# run a target function inside a new child process
from time import sleep
from multiprocessing import Process

# function executed by the child process
def task():
    # simulate some work
    sleep(1)
    # show that the child really ran
    print('This is from another process', flush=True)

# guard the entry point so child processes can import safely
if __name__ == '__main__':
    # configure a process around the target function
    process = Process(target=task)
    # launch the child
    process.start()
    # block until the child has finished
    print('Waiting for the process...')
    process.join()

# ============================================================
# src/lesson02_extend_process.py
# ============================================================
# SuperFastPython.com
# subclass Process instead of passing a target function
from time import sleep
from multiprocessing import Process

# Process subclass whose run() holds the task logic
class CustomProcess(Process):
    # run() is invoked in the child after start()
    def run(self):
        # simulate some work
        sleep(1)
        # show that the child really ran
        print('This is another process', flush=True)

# guard the entry point so child processes can import safely
if __name__ == '__main__':
    # instantiate the subclass
    process = CustomProcess()
    # launch the child, which calls run()
    process.start()
    # block until the child has finished
    print('Waiting for the process to finish')
    process.join()

# ============================================================
# src/lesson03_active_children.py
# ============================================================
# SuperFastPython.com
# enumerate the currently-running child processes
from time import sleep
from multiprocessing import active_children
from multiprocessing import Process

# function executed by each child process
def task():
    # keep the child alive long enough to be observed
    sleep(1)

# guard the entry point so child processes can import safely
if __name__ == '__main__':
    # prepare several children
    processes = [Process(target=task) for _ in range(5)]
    # launch them all
    for process in processes:
        process.start()
    # snapshot the live children of this process
    children = active_children()
    # how many are alive right now
    print(f'Active Children Count: {len(children)}')
    # describe each one
    for child in children:
        print(child)

# ============================================================
# src/lesson03_current_process.py
# ============================================================
# SuperFastPython.com
# obtain the Process object for the running process
from multiprocessing import current_process

# guard the entry point so child processes can import safely
if __name__ == '__main__':
    # handle on this (the main) process
    process = current_process()
    # show its details
    print(process)

# ============================================================
# src/lesson03_daemon.py
# ============================================================
# SuperFastPython.com
# configure a process as a daemon via the constructor
from multiprocessing import Process

# guard the entry point so child processes can import safely
if __name__ == '__main__':
    # daemon processes are killed when the parent exits
    process = Process(daemon=True)
    # confirm the daemon flag
    print(process.daemon)

# ============================================================
# src/lesson03_exitcode.py
# ============================================================
# SuperFastPython.com
# observe a child's exit status across its lifetime
from time import sleep
from multiprocessing import Process

# function executed by the child process
def task():
    # simulate some work
    sleep(1)

# guard the entry point so child processes can import safely
if __name__ == '__main__':
    # configure the child
    process = Process(target=task)
    # None: not started yet
    print(process.exitcode)
    # launch the child
    process.start()
    # None: still running
    print(process.exitcode)
    # block until the child has finished
    process.join()
    # 0: terminated normally
    print(process.exitcode)

# ============================================================
# src/lesson03_isalive.py
# ============================================================
# SuperFastPython.com
# query whether a process is currently running
from multiprocessing import Process

# guard the entry point so child processes can import safely
if __name__ == '__main__':
    # a process that was never started
    process = Process()
    # False: not started, therefore not alive
    print(process.is_alive())

# ============================================================
# src/lesson03_name.py
# ============================================================
# SuperFastPython.com
# assign a custom process name via the constructor
from multiprocessing import Process

# guard the entry point so child processes can import safely
if __name__ == '__main__':
    # name the process at construction time
    process = Process(name='MyProcess')
    # confirm the assigned name
    print(process.name)
# ============================================================
# src/lesson03_parent.py
# ============================================================
# SuperFastPython.com
# obtain the parent of the current process
from multiprocessing import parent_process

# guard the entry point so child processes can import safely
if __name__ == '__main__':
    # None here, because the main process has no parent
    process = parent_process()
    # show its details
    print(process)

# ============================================================
# src/lesson03_pid.py
# ============================================================
# SuperFastPython.com
# observe the native process identifier before and after start
from multiprocessing import Process

# guard the entry point so child processes can import safely
if __name__ == '__main__':
    # a process that has not been started
    process = Process()
    # None: no pid until the process starts
    print(process.pid)
    # launch the child
    process.start()
    # now the OS-assigned identifier is available
    print(process.pid)

# ============================================================
# src/lesson04_barrier.py
# ============================================================
# SuperFastPython.com
# coordinate a group of processes with a barrier
from time import sleep
from random import random
from multiprocessing import Process
from multiprocessing import Barrier

# function executed by each worker process
def task(shared_barrier, ident):
    # a unique value between 0 and 10
    value = random() * 10
    # simulate work of that duration
    sleep(value)
    # announce this worker's result
    print(f'Process {ident} got: {value}', flush=True)
    # rendezvous with the other parties
    shared_barrier.wait()

# guard the entry point so child processes can import safely
if __name__ == '__main__':
    # one party per worker plus one for the main process
    barrier = Barrier(5 + 1)
    # configure the workers
    workers = [Process(target=task,
        args=(barrier, i)) for i in range(5)]
    # launch them all
    for worker in workers:
        # start process
        worker.start()
    # main process joins the rendezvous last
    print('Main process waiting on all results...')
    barrier.wait()
    # the barrier released: every worker produced a result
    print('All processes have their result')

# ============================================================
# src/lesson04_condition.py
# ============================================================
# SuperFastPython.com
# wait/notify handshake between processes via a condition
from time import sleep
from multiprocessing import Process
from multiprocessing import Condition

# function executed by the child process
def task(shared_condition):
    # simulate preparing some data
    sleep(1)
    # signal the waiting main process
    print('Child sending notification...', flush=True)
    with shared_condition:
        shared_condition.notify()

# guard the entry point so child processes can import safely
if __name__ == '__main__':
    # shared condition variable
    condition = Condition()
    # hold the condition before starting the child so the
    # notification cannot be missed
    print('Main process waiting for data...')
    with condition:
        # configure the child
        worker = Process(target=task, args=(condition,))
        # launch the child
        worker.start()
        # release the lock and sleep until notified
        condition.wait()
    # notification received
    print('Main process all done')

# ============================================================
# src/lesson04_event.py
# ============================================================
# SuperFastPython.com
# release many processes at once with an event
from time import sleep
from random import random
from multiprocessing import Process
from multiprocessing import Event

# function executed by each child process
def task(shared_event, number):
    # park until the main process sets the event
    print(f'Process {number} waiting...', flush=True)
    shared_event.wait()
    # event set: do the work, draw a random number
    value = random()
    # simulate work of that duration
    sleep(value)
    # announce this child's result
    print(f'Process {number} got {value}', flush=True)

# guard the entry point so child processes can import safely
if __name__ == '__main__':
    # shared event, initially unset
    event = Event()
    # configure the children
    processes = [Process(target=task,
        args=(event, i)) for i in range(5)]
    # launch them all; they will block on the event
    for process in processes:
        process.start()
    # let the children reach their wait() calls
    print('Main process blocking...')
    sleep(2)
    # release every waiting child at once
    event.set()
    # block until all children have finished
    for process in processes:
        process.join()

# ============================================================
# src/lesson04_lock.py
# ============================================================
# SuperFastPython.com
# serialize access to a critical section with a mutex lock
from time import sleep
from random import random
from multiprocessing import Process
from multiprocessing import Lock

# function executed by each child process
def task(shared_lock, ident, value):
    # only one process at a time may hold the lock
    with shared_lock:
        # announce who holds the lock and for how long
        print(f'>{ident} got lock, sleeping {value}',
            flush=True)
        # simulate work inside the critical section
        sleep(value)

# guard the entry point so child processes can import safely
if __name__ == '__main__':
    # shared mutual-exclusion lock
    lock = Lock()
    # configure children with distinct ids and sleep times
    processes = [Process(target=task,
        args=(lock, i, random())) for i in range(10)]
    # launch them all
    for process in processes:
        process.start()
    # block until all children have finished
    for process in processes:
        process.join()

# ============================================================
# src/lesson04_semaphore.py
# ============================================================
# SuperFastPython.com
# bound concurrent access to a resource with a semaphore
from time import sleep
from random import random
from multiprocessing import Process
from multiprocessing import Semaphore

# function executed by each child process
def task(shared_semaphore, ident):
    # at most two holders at any moment
    with shared_semaphore:
        # draw a random value between 0 and 1
        val = random()
        # simulate work of that duration
        sleep(val)
        # announce this child's result
        print(f'Process {ident} got {val}', flush=True)

# guard the entry point so child processes can import safely
if __name__ == '__main__':
    # semaphore permitting two concurrent holders
    semaphore = Semaphore(2)
    # configure the children
    processes = [Process(target=task,
        args=(semaphore, i)) for i in range(10)]
    # launch them all
    for process in processes:
        process.start()
    # block until all children have finished
    for process in processes:
        process.join()

# ============================================================
# src/lesson05_ctypes.py
# ============================================================
# SuperFastPython.com
# share a ctype value between parent and child processes
from random import random
from multiprocessing import Value
from multiprocessing import Process

# function executed by the child process
def task(shared_var):
    # draw a random float
    generated = random()
    # write it into the shared memory
    shared_var.value = generated
    # announce what was written
    print(f'Wrote: {shared_var.value}', flush=True)

# guard the entry point so child processes can import safely
if __name__ == '__main__':
    # shared single-precision float, initially zero
    variable = Value('f', 0.0)
    # configure the child with the shared value
    process = Process(target=task, args=(variable,))
    # launch the child
    process.start()
    # block until the child has finished
    process.join()
    # read back what the child wrote
    data = variable.value
    # announce what was read
    print(f'Read: {data}')

# ============================================================
# src/lesson05_inherited.py
# ============================================================
# SuperFastPython.com
# inherit and rebind a global in a forked child process
from multiprocessing import Process
from multiprocessing import set_start_method

# function executed by the child process
def task():
    # rebind the module-level name, not a local
    global data
    # the forked child sees the parent's value
    print(f'child process before: {data}', flush=True)
    # rebinding only affects the child's copy
    data = 'hello hello!'
    # show the child's new value
    print(f'child process after: {data}', flush=True)

# guard the entry point so child processes can import safely
if __name__ == '__main__':
    # fork copies parent memory into the child
    set_start_method('fork')
    # module-level state inherited by the child
    data = 'Hello there'
    # show the parent's value before forking
    print(f'main process: {data}')
    # launch the child
    process = Process(target=task)
    process.start()
    # block until the child has finished
    process.join()
    # unchanged: the child's rebinding was not shared back
    print(f'main process: {data}')

# ============================================================
# src/lesson05_pipe.py
# ============================================================
# SuperFastPython.com
# stream values between two processes over a pipe
from time import sleep
from random import random
from multiprocessing import Process
from multiprocessing import Pipe

# produces work items and sends them down the pipe
def sender(connection):
    print('Sender: Running', flush=True)
    # produce ten items
    for _ in range(10):
        # draw a random value
        value = random()
        # simulate work of that duration
        sleep(value)
        # ship the value to the receiver
        connection.send(value)
    # sentinel: no further messages will follow
    connection.send(None)
    print('Sender: Done', flush=True)

# consumes work items arriving on the pipe
def receiver(connection):
    print('Receiver: Running', flush=True)
    # consume until the sentinel arrives
    while True:
        # block for the next item
        item = connection.recv()
        # announce it
        print(f'>receiver got {item}', flush=True)
        # sentinel means the sender is finished
        if item is None:
            break
    # all items processed
    print('Receiver: Done', flush=True)

# guard the entry point so child processes can import safely
if __name__ == '__main__':
    # two connected connection ends
    conn1, conn2 = Pipe()
    # launch the sending side
    sender_p = Process(target=sender, args=(conn2,))
    sender_p.start()
    # launch the receiving side
    receiver_p = Process(target=receiver, args=(conn1,))
    receiver_p.start()
    # block until both sides have finished
    sender_p.join()
    receiver_p.join()

# ============================================================
# src/lesson05_queue.py
# ============================================================
# SuperFastPython.com
# producer/consumer processes sharing a queue
from time import sleep
from random import random
from multiprocessing import Process
from multiprocessing import Queue

# produces work items onto the shared queue
def producer(shared_queue):
    print('Producer: Running', flush=True)
    # produce ten items
    for _ in range(10):
        # draw a random value
        value = random()
        # simulate work of that duration
        sleep(value)
        # enqueue the value
        shared_queue.put(value)
    # sentinel: signal the consumer to stop
    shared_queue.put(None)
    print('Producer: Done', flush=True)

# consumes work items from the shared queue
def consumer(shared_queue):
    print('Consumer: Running', flush=True)
    # consume until the sentinel arrives
    while True:
        # block for the next item
        item = shared_queue.get()
        # sentinel means the producer is finished
        if item is None:
            break
        # announce it
        print(f'>got {item}', flush=True)
    # all items processed
    print('Consumer: Done', flush=True)

# guard the entry point so child processes can import safely
if __name__ == '__main__':
    # queue shared by both processes
    queue = Queue()
    # launch the consumer first so it is ready to receive
    consumer_p = Process(target=consumer, args=(queue,))
    consumer_p.start()
    # launch the producer
    producer_p = Process(target=producer, args=(queue,))
    producer_p.start()
    # block until both sides have finished
    producer_p.join()
    consumer_p.join()

# ============================================================
# src/lesson06_apply_async.py
# ============================================================
# SuperFastPython.com
# issue a one-off task to a pool asynchronously
from multiprocessing import Pool

# function executed by a pool worker process
def task():
    # show that a worker really ran the task
    print('This is another process', flush=True)

# guard the entry point so child processes can import safely
if __name__ == '__main__':
    # pool with one worker per CPU, closed automatically
    with Pool() as pool:
        # submit without blocking; get a handle on the result
        async_result = pool.apply_async(task)
        # block until the task has completed
        async_result.wait()

# ============================================================
# src/lesson06_callback.py
# ============================================================
# SuperFastPython.com
# receive a one-off task's return value via a callback
from random import random
from time import sleep
from multiprocessing import Pool

# invoked in the main process with the task's return value
def result_callback(return_value):
    # announce the delivered result
    print(f'Callback got: {return_value}', flush=True)

# function executed by a pool worker process
def task(ident):
    # draw a random value
    value = random()
    # announce which task got which value
    print(f'Task {ident} with {value}', flush=True)
    # simulate work of that duration
    sleep(value)
    # hand the value back to the callback
    return value

# guard the entry point so child processes can import safely
if __name__ == '__main__':
    # pool with one worker per CPU, closed automatically
    with Pool() as pool:
        # submit the task, routing its result to the callback
        result = pool.apply_async(task, args=(0,),
            callback=result_callback)
        # no more tasks will be submitted
        pool.close()
        # block until all submitted tasks have completed
        pool.join()

# ============================================================
# src/lesson06_map.py
# ============================================================
# SuperFastPython.com
# run the same task over many arguments with map
from multiprocessing import Pool

# function executed by a pool worker for each argument
def task(arg):
    # announce the received argument
    print(f'Worker task got {arg}', flush=True)
    # hand back a derived value
    return arg * 2

# guard the entry point so child processes can import safely
if __name__ == '__main__':
    # pool with one worker per CPU, closed automatically
    with Pool() as pool:
        # map the task over 0..9 and iterate the results in order
        for result in pool.map(task, range(10)):
            # announce each return value
            print(result)

# ============================================================
# src/lesson07_manager_list.py
# ============================================================
# SuperFastPython.com
# share a list among processes through a manager proxy
from time import sleep
from random import random
from multiprocessing import Process
from multiprocessing import Manager

# function executed by each child process
def task(number, shared_list):
    # draw a random value between 0 and 1
    value = random()
    # simulate work of that duration
    sleep(value)
    # append through the proxy to the manager-hosted list
    shared_list.append((number, value))

# guard the entry point so child processes can import safely
if __name__ == '__main__':
    # manager process hosts the shared objects
    with Manager() as manager:
        # list living in the manager, accessed via proxies
        managed_list = manager.list()
        # configure many children
        processes = [Process(target=task,
            args=(i, managed_list)) for i in range(50)]
        # launch them all
        for process in processes:
            process.start()
        # block until all children have finished
        for process in processes:
            process.join()
        # every child appended exactly one item
        print(f'List: {len(managed_list)}')

# src/lesson07_manager_semaphore.py follows below.
# ============================================================
# src/lesson07_manager_semaphore.py
# ============================================================
# SuperFastPython.com
# share a semaphore with pool workers through a manager
from time import sleep
from random import random
from multiprocessing import Manager
from multiprocessing import Pool

# function executed by a pool worker process
def task(number, shared_semaphore):
    # at most two holders at any moment
    with shared_semaphore:
        # draw a random value between 0 and 1
        value = random()
        # simulate work of that duration
        sleep(value)
        # announce this task's result
        print(f'{number} got {value}')

# guard the entry point so child processes can import safely
if __name__ == '__main__':
    # manager process hosts the shared semaphore
    with Manager() as manager:
        # semaphore proxy that pool workers can use
        managed_sem = manager.Semaphore(2)
        # pool with one worker per CPU, closed automatically
        with Pool() as pool:
            # one (number, semaphore) pair per task
            args = [(i,managed_sem) for i in range(10)]
            # fan the tasks out across the pool workers
            pool.starmap(task, args)