├── .gitignore
├── README.md
├── cover.png
└── src
    ├── loop1.py
    ├── loop2.py
    ├── loop3.py
    ├── loop4.py
    ├── loop5.py
    └── loop6.py

/.gitignore:
--------------------------------------------------------------------------------
# git ignore
**/.DS_Store
.DS_Store

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Parallel Loops In Python

![Parallel Loops In Python](cover.png)

This repository provides all source code for the book:

* **Parallel Loops In Python**: _Run Python Loops on All CPU Cores_, Jason Brownlee, 2022.

## Source Code

You can access all Python .py files directly here:

* [src/](src/)
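Each file demonstrates one way to run a loop in parallel:

* [src/loop1.py](src/loop1.py): with the `Thread` class
* [src/loop2.py](src/loop2.py): with the `ThreadPool` class
* [src/loop3.py](src/loop3.py): with the `ThreadPoolExecutor` class
* [src/loop4.py](src/loop4.py): with the `Process` class
* [src/loop5.py](src/loop5.py): with the `Pool` class
* [src/loop6.py](src/loop6.py): with the `ProcessPoolExecutor` class

As a quick taste of the pattern (a minimal sketch adapted from [src/loop5.py](src/loop5.py), with a placeholder `task()` standing in for your own work), a serial loop such as `for i in range(100): task(i)` becomes:

```python
from multiprocessing import Pool

# placeholder for your own CPU-bound work
def task(value):
    return value * 2

# protect the entry point
if __name__ == '__main__':
    # create the pool, one worker per logical CPU by default
    with Pool() as pool:
        # apply task() to each item, in parallel across processes
        for result in pool.map(task, range(100)):
            print(result)
```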
## Get the Book

You can learn more about the book here:

* [Gumroad](https://superfastpython.gumroad.com/l/plip)

### Book Blurb

> Are your loops slow?
>
> Why not run your loops in parallel?
> (using all CPU cores)
>
> Introducing: Parallel Loops in Python
>
> Discover this new ebook that shows you how to execute Python loops in parallel using all CPU cores.

--------------------------------------------------------------------------------
/cover.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SuperFastPython/ParallelLoopsInPython/d2557a9555e628a7811ebbc7ad63f8ee0a41c7d4/cover.png

--------------------------------------------------------------------------------
/src/loop1.py:
--------------------------------------------------------------------------------
# SuperFastPython.com
# example of a parallel for loop with the Thread class
from threading import Thread

# execute a task
def task(value):
    # add your work here...
    # ...
    # all done
    print(f'.done {value}')

# protect the entry point
if __name__ == '__main__':
    # create all tasks
    threads = [Thread(target=task, args=(i,)) for i in range(20)]
    # start all threads
    for thread in threads:
        thread.start()
    # wait for all threads to complete
    for thread in threads:
        thread.join()
    # report that all tasks are completed
    print('Done')

--------------------------------------------------------------------------------
/src/loop2.py:
--------------------------------------------------------------------------------
# SuperFastPython.com
# example of a parallel for loop with the ThreadPool class
from multiprocessing.pool import ThreadPool

# execute a task
def task(value):
    # add your work here...
    # ...
    # return a result, if needed
    return value

# protect the entry point
if __name__ == '__main__':
    # create the pool with the default number of workers
    with ThreadPool() as pool:
        # issue one task for each call to the function
        for result in pool.map(task, range(100)):
            # handle the result
            print(f'>got {result}')
    # report that all tasks are completed
    print('Done')

--------------------------------------------------------------------------------
/src/loop3.py:
--------------------------------------------------------------------------------
# SuperFastPython.com
# example of a parallel for loop with the ThreadPoolExecutor class
import concurrent.futures

# execute a task
def task(value):
    # add your work here...
    # return a result, if needed
    return value

# protect the entry point
if __name__ == '__main__':
    # create the pool with the default number of workers
    with concurrent.futures.ThreadPoolExecutor() as exe:
        # issue some tasks and collect futures
        futures = [exe.submit(task, i) for i in range(50)]
        # handle results as tasks are completed
        for future in concurrent.futures.as_completed(futures):
            print(f'>got {future.result()}')
        # issue one task for each call to the function
        for result in exe.map(task, range(50)):
            print(f'>got {result}')
    # report that all tasks are completed
    print('Done')

--------------------------------------------------------------------------------
/src/loop4.py:
--------------------------------------------------------------------------------
# SuperFastPython.com
# example of a parallel for loop with the Process class
from multiprocessing import Process

# execute a task
def task(value):
    # add your work here...
    # ...
    # all done
    print(f'.done {value}', flush=True)

# protect the entry point
if __name__ == '__main__':
    # create all tasks
    processes = [Process(target=task, args=(i,)) for i in range(20)]
    # start all processes
    for process in processes:
        process.start()
    # wait for all processes to complete
    for process in processes:
        process.join()
    # report that all tasks are completed
    print('Done')

--------------------------------------------------------------------------------
/src/loop5.py:
--------------------------------------------------------------------------------
# SuperFastPython.com
# example of a parallel for loop with the Pool class
from multiprocessing import Pool

# execute a task
def task(value):
    # add your work here...
    # ...
    # return a result, if needed
    return value

# protect the entry point
if __name__ == '__main__':
    # create the pool with the default number of workers
    with Pool() as pool:
        # issue one task for each call to the function
        for result in pool.map(task, range(100)):
            # handle the result
            print(f'>got {result}')
    # report that all tasks are completed
    print('Done')

--------------------------------------------------------------------------------
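A note on the `Pool` example above: `pool.map()` yields results in issue order. To handle results in completion order instead, `Pool` also provides `imap_unordered()`. The following is a minimal sketch of that variation (not one of the book's files; the `task()` body is a placeholder):

```python
from multiprocessing import Pool

# placeholder task that returns its argument
def task(value):
    return value

# protect the entry point
if __name__ == '__main__':
    with Pool() as pool:
        # yields results in completion order, not issue order
        for result in pool.imap_unordered(task, range(100)):
            print(f'>got {result}')
    print('Done')
```

--------------------------------------------------------------------------------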
/src/loop6.py:
--------------------------------------------------------------------------------
# SuperFastPython.com
# example of a parallel for loop with the ProcessPoolExecutor class
import concurrent.futures

# execute a task
def task(value):
    # add your work here...
    # return a result, if needed
    return value

# protect the entry point
if __name__ == '__main__':
    # create the pool with the default number of workers
    with concurrent.futures.ProcessPoolExecutor() as exe:
        # issue some tasks and collect futures
        futures = [exe.submit(task, i) for i in range(50)]
        # process results as tasks are completed
        for future in concurrent.futures.as_completed(futures):
            print(f'>got {future.result()}')
        # issue one task for each call to the function
        for result in exe.map(task, range(50)):
            print(f'>got {result}')
    # report that all tasks are completed
    print('Done')
--------------------------------------------------------------------------------
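One tuning note on the `ProcessPoolExecutor` example above: with many small tasks, per-task inter-process overhead can dominate the runtime. `Executor.map()` accepts a `chunksize` argument that sends items to workers in batches (it only has an effect with `ProcessPoolExecutor`; thread pools ignore it). A minimal sketch, assuming the same placeholder `task()`:

```python
import concurrent.futures

# placeholder task that returns its argument
def task(value):
    return value

# protect the entry point
if __name__ == '__main__':
    with concurrent.futures.ProcessPoolExecutor() as exe:
        # send items to workers in batches of 10 to reduce overhead
        for result in exe.map(task, range(100), chunksize=10):
            print(f'>got {result}')
    print('Done')
```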