├── .babelrc ├── .bumpversion.cfg ├── .clang-format ├── .devcontainer ├── devcontainer.json └── tutorials │ └── devcontainer.json ├── .github ├── ISSUE_TEMPLATE │ ├── ---bug-report.yaml │ ├── ---feature-request.yaml │ └── config.yml ├── dependabot.yml └── workflows │ ├── build.yml │ ├── build_wheels.yml │ ├── coverage.yml │ ├── docs.yml │ ├── news-check.yml │ └── sanity-check.yml ├── .gitignore ├── .pre-commit-config.yaml ├── CONTRIBUTING.md ├── Dockerfile ├── LICENSE ├── MANIFEST.in ├── Makefile ├── NEWS.rst ├── README.md ├── asv.conf.json ├── benchmarks ├── __init__.py ├── benchmarking │ ├── __main__.py │ ├── cases │ │ ├── __init__.py │ │ ├── async_tree_base.py │ │ ├── async_tree_memray.py │ │ ├── deltablue_base.py │ │ ├── deltablue_memray.py │ │ ├── docutils_data │ │ │ └── docs │ │ │ │ ├── api │ │ │ │ ├── publisher.txt │ │ │ │ ├── runtime-settings.txt │ │ │ │ └── transforms.txt │ │ │ │ ├── dev │ │ │ │ ├── distributing.txt │ │ │ │ ├── enthought-plan.txt │ │ │ │ ├── enthought-rfp.txt │ │ │ │ ├── hacking.txt │ │ │ │ ├── policies.txt │ │ │ │ ├── pysource.txt │ │ │ │ ├── release.txt │ │ │ │ ├── repository.txt │ │ │ │ ├── rst │ │ │ │ │ ├── alternatives.txt │ │ │ │ │ └── problems.txt │ │ │ │ ├── runtime-settings-processing.txt │ │ │ │ ├── semantics.txt │ │ │ │ ├── testing.txt │ │ │ │ ├── todo.txt │ │ │ │ └── website.txt │ │ │ │ └── index.txt │ │ ├── docutils_html_base.py │ │ ├── docutils_html_memray.py │ │ ├── fannkuch_base.py │ │ ├── fannkuch_memray.py │ │ ├── go_base.py │ │ ├── go_memray.py │ │ ├── hexion_base.py │ │ ├── hexion_memray.py │ │ ├── json_dumps_base.py │ │ ├── json_dumps_memray.py │ │ ├── json_loads_base.py │ │ ├── json_loads_memray.py │ │ ├── mdp_base.py │ │ ├── mdp_memray.py │ │ ├── meteor_context_base.py │ │ ├── meteor_context_memray.py │ │ ├── nbody_base.py │ │ ├── nbody_memray.py │ │ ├── nqueens_base.py │ │ ├── nqueens_memray.py │ │ ├── pickles_base.py │ │ ├── pickles_memray.py │ │ ├── pprint_format_base.py │ │ ├── pprint_format_memray.py │ │ ├── raytrace_base.py │ │ ├── raytrace_memray.py │ │ ├── regex_dna_base.py │ │ ├── regex_dna_memray.py │ │ ├── regex_effbot_base.py │ │ ├── regex_effbot_memray.py │ │ ├── regex_v8_base.py │ │ ├── regex_v8_memray.py │ │ ├── spectral_norm_base.py │ │ ├── spectral_norm_memray.py │ │ ├── sqlite_synth_base.py │ │ ├── sqlite_synth_memray.py │ │ ├── telco_base.py │ │ ├── telco_data │ │ │ └── telco-bench.b │ │ └── telco_memray.py │ └── plot.py ├── benchmarks.py ├── plot_defaut.png ├── plot_native.png ├── plot_python_allocators.png ├── plot_python_allocators_+_native.png └── requirements.txt ├── docs ├── Makefile ├── _static │ ├── css │ │ └── custom.css │ ├── flamegraphs │ │ ├── .gitattributes │ │ ├── memray-flamegraph-fib.html │ │ ├── memray-flamegraph-mandelbrot.html │ │ ├── memray-flamegraph-nbody.html │ │ └── memray-flamegraph-sqlite.html │ ├── images │ │ ├── circle_inner.png │ │ ├── circle_inner_opt.png │ │ ├── circle_outer.png │ │ ├── circle_outer_opt.png │ │ ├── codespaces_testing_tab.png │ │ ├── complex_example.png │ │ ├── exercise1_flamegraph.png │ │ ├── exercise2_flamegraph.png │ │ ├── exercise3_flamegraph_basic.png │ │ ├── exercise3_flamegraph_native.png │ │ ├── filter_thread_dropdown.png │ │ ├── flamegraph_example.png │ │ ├── gprof2dot_example.png │ │ ├── icicle_flame_toggle.png │ │ ├── inverted_flame_graph_for_inverted_example.png │ │ ├── live_animated.webp │ │ ├── live_different_thread.png │ │ ├── live_disconnected.png │ │ ├── live_running.png │ │ ├── logo.png │ │ ├── mandelbrot_operation_native.png │ │ ├── 
mandelbrot_operation_non_native.png │ │ ├── memray.png │ │ ├── native_mode_debug.png │ │ ├── native_mode_no_debug.png │ │ ├── non_relevant_checkbox.png │ │ ├── normal_flamegraph_for_inverted_example.png │ │ ├── output.png │ │ ├── plot_default_options.png │ │ ├── plot_defaut.png │ │ ├── plot_native.png │ │ ├── plot_python_allocators.png │ │ ├── plot_python_allocators_+_native.png │ │ ├── plot_python_allocators_native.png │ │ ├── ports_tab.png │ │ ├── pymalloc.png │ │ ├── pytest_cli_output.png │ │ ├── quotes.png │ │ ├── rss_vs_heap.png │ │ ├── rss_vs_heap_no_free.png │ │ ├── simple_example.png │ │ ├── stats_example.png │ │ ├── summary_example.png │ │ ├── table_example.png │ │ ├── temporal_high_water_mark_controls.png │ │ └── tree_example.png │ └── js │ │ └── custom.js ├── _templates │ └── index.html ├── api.rst ├── attach.rst ├── changelog.rst ├── conf.py ├── examples │ ├── README.rst │ ├── fibonacci │ │ └── fib.py │ ├── mandelbrot │ │ ├── mandelbrot-threaded.py │ │ ├── mandelbrot.py │ │ └── requirements.txt │ ├── nbody │ │ ├── example.py │ │ └── requirements.txt │ └── sqlite │ │ └── example.py ├── favicon.ico ├── flamegraph.rst ├── getting_started.rst ├── index.rst ├── jupyter_magic.rst ├── licenses.rst ├── live.rst ├── manpage.rst ├── memory.rst ├── native_mode.rst ├── overview.rst ├── performance.rst ├── python_allocators.rst ├── run.rst ├── stats.rst ├── summary.rst ├── supported_environments.rst ├── table.rst ├── temporary_allocations.rst ├── transform.rst ├── tree.rst └── tutorials │ ├── 1.rst │ ├── 2.rst │ ├── 3.rst │ ├── Dockerfile │ ├── additional_features.rst │ ├── exercise_1 │ ├── __init__.py │ └── fibonacci.py │ ├── exercise_2 │ ├── __init__.py │ └── holding_onto_memory.py │ ├── exercise_3 │ ├── __init__.py │ └── lru_cache.py │ ├── index.rst │ ├── requirements-tutorial.txt │ ├── solutions │ ├── exercise_1 │ │ └── fibonacci.py │ ├── exercise_2 │ │ └── holding_onto_memory.py │ └── exercise_3 │ │ └── lru_cache.py │ └── tests │ ├── __init__.py │ ├── test_exercise_1.py │ ├── test_exercise_2.py │ └── test_exercise_3.py ├── news ├── .gitignore ├── 742.removal.1.rst └── 742.removal.2.rst ├── package-lock.json ├── package.json ├── pyproject.toml ├── requirements-docs.txt ├── requirements-extra.txt ├── requirements-test.txt ├── setup.py ├── src ├── memray │ ├── __init__.py │ ├── __init__.pyi │ ├── __main__.py │ ├── _destination.py │ ├── _errors.py │ ├── _ipython │ │ ├── __init__.py │ │ └── flamegraph.py │ ├── _memray.pyi │ ├── _memray.pyx │ ├── _memray │ │ ├── CMakeLists.txt │ │ ├── __init__.pxd │ │ ├── algorithm.pxd │ │ ├── alloc.h │ │ ├── alloc.pxd │ │ ├── compat.cpp │ │ ├── compat.h │ │ ├── elf_shenanigans.cpp │ │ ├── elf_utils.h │ │ ├── exceptions.h │ │ ├── frame_tree.h │ │ ├── hooks.cpp │ │ ├── hooks.h │ │ ├── hooks.pxd │ │ ├── inject.cpp │ │ ├── linker_shenanigans.h │ │ ├── logging.cpp │ │ ├── logging.h │ │ ├── logging.pxd │ │ ├── lz4_stream.h │ │ ├── macho_shenanigans.cpp │ │ ├── macho_utils.h │ │ ├── native_resolver.cpp │ │ ├── native_resolver.h │ │ ├── native_resolver.pxd │ │ ├── pthread.pxd │ │ ├── python_helpers.cpp │ │ ├── python_helpers.h │ │ ├── record_reader.cpp │ │ ├── record_reader.h │ │ ├── record_reader.pxd │ │ ├── record_writer.cpp │ │ ├── record_writer.h │ │ ├── record_writer.pxd │ │ ├── records.cpp │ │ ├── records.h │ │ ├── records.pxd │ │ ├── sink.cpp │ │ ├── sink.h │ │ ├── sink.pxd │ │ ├── snapshot.cpp │ │ ├── snapshot.h │ │ ├── snapshot.pxd │ │ ├── socket_reader_thread.cpp │ │ ├── socket_reader_thread.h │ │ ├── socket_reader_thread.pxd │ │ ├── source.cpp │ │ ├── 
source.h │ │ ├── source.pxd │ │ ├── tracking_api.cpp │ │ ├── tracking_api.h │ │ └── tracking_api.pxd │ ├── _memray_test_utils.pyx │ ├── _metadata.py │ ├── _stats.py │ ├── _stats.pyi │ ├── _test.py │ ├── _test_utils.pyi │ ├── _thread_name_interceptor.py │ ├── _version.py │ ├── commands │ │ ├── __init__.py │ │ ├── _attach.gdb │ │ ├── _attach.lldb │ │ ├── attach.py │ │ ├── common.py │ │ ├── flamegraph.py │ │ ├── live.py │ │ ├── parse.py │ │ ├── run.py │ │ ├── stats.py │ │ ├── summary.py │ │ ├── table.py │ │ ├── transform.py │ │ └── tree.py │ ├── py.typed │ └── reporters │ │ ├── __init__.py │ │ ├── _textual_hacks.py │ │ ├── assets │ │ ├── README.md │ │ ├── __init__.py │ │ ├── common.js │ │ ├── common.test.js │ │ ├── flamegraph.js │ │ ├── flamegraph_common.js │ │ ├── table.js │ │ └── temporal_flamegraph.js │ │ ├── common.py │ │ ├── flamegraph.py │ │ ├── frame_tools.py │ │ ├── stats.py │ │ ├── summary.py │ │ ├── table.py │ │ ├── templates │ │ ├── __init__.py │ │ ├── assets │ │ │ ├── .gitattributes │ │ │ ├── flamegraph.css │ │ │ ├── flamegraph.js │ │ │ ├── flamegraph_common.js │ │ │ ├── table.css │ │ │ ├── table.js │ │ │ └── temporal_flamegraph.js │ │ ├── base.html │ │ ├── classic_base.html │ │ ├── flamegraph.html │ │ ├── table.html │ │ └── temporal_flamegraph.html │ │ ├── transform.py │ │ ├── tree.css │ │ ├── tree.py │ │ ├── tui.css │ │ └── tui.py └── vendor │ ├── libbacktrace-patches │ ├── 0001-Expose-some-internal-functions-of-libbacktrace.patch │ └── 0002-Add-debuginfod-support-to-libbacktrace.patch │ ├── libbacktrace │ ├── .gitignore │ ├── Isaac.Newton-Opticks.txt │ ├── LICENSE │ ├── Makefile.am │ ├── Makefile.in │ ├── README.md │ ├── aclocal.m4 │ ├── alloc.c │ ├── allocfail.c │ ├── allocfail.sh │ ├── atomic.c │ ├── backtrace-supported.h.in │ ├── backtrace.c │ ├── backtrace.h │ ├── btest.c │ ├── compile │ ├── config.guess │ ├── config.h.in │ ├── config.sub │ ├── config │ │ ├── enable.m4 │ │ ├── lead-dot.m4 │ │ ├── libtool.m4 │ │ ├── ltoptions.m4 │ │ ├── ltsugar.m4 │ │ ├── ltversion.m4 │ │ ├── lt~obsolete.m4 │ │ ├── multi.m4 │ │ ├── override.m4 │ │ ├── unwind_ipinfo.m4 │ │ └── warnings.m4 │ ├── configure │ ├── configure.ac │ ├── debuginfod_support.h │ ├── dwarf.c │ ├── edtest.c │ ├── edtest2.c │ ├── elf.c │ ├── fileline.c │ ├── filenames.h │ ├── filetype.awk │ ├── install-debuginfo-for-buildid.sh.in │ ├── install-sh │ ├── instrumented_alloc.c │ ├── internal.h │ ├── ltmain.sh │ ├── macho.c │ ├── missing │ ├── mmap.c │ ├── mmapio.c │ ├── move-if-change │ ├── mtest.c │ ├── nounwind.c │ ├── pecoff.c │ ├── posix.c │ ├── print.c │ ├── read.c │ ├── simple.c │ ├── sort.c │ ├── state.c │ ├── stest.c │ ├── test-driver │ ├── test_format.c │ ├── testlib.c │ ├── testlib.h │ ├── ttest.c │ ├── unittest.c │ ├── unknown.c │ ├── xcoff.c │ ├── xztest.c │ ├── zstdtest.c │ └── ztest.c │ └── regenerate_libbacktrace.sh ├── tests ├── __init__.py ├── conftest.py ├── integration │ ├── __init__.py │ ├── misbehaving_extension │ │ ├── __init__.py │ │ ├── misbehaving.cpp │ │ └── setup.py │ ├── multithreaded_extension │ │ ├── __init__.py │ │ ├── main.py │ │ ├── setup.py │ │ └── testext.cpp │ ├── native_extension │ │ ├── main.py │ │ ├── native_ext.c │ │ └── setup.py │ ├── rpath_extension │ │ ├── ext.c │ │ ├── setup.py │ │ └── sharedlibs │ │ │ └── sharedlib.c │ ├── test_api.py │ ├── test_attach.py │ ├── test_extensions.py │ ├── test_greenlet.py │ ├── test_ipython.py │ ├── test_main.py │ ├── test_native_tracking.py │ ├── test_processes.py │ ├── test_socket.py │ ├── test_threads.py │ ├── test_tracing.py │ └── test_tracking.py 
├── test_utils.py ├── unit │ ├── __init__.py │ ├── __snapshots__ │ │ ├── test_tree_reporter │ │ │ ├── TestTUILooks.test_allocations_of_different_sizes.svg │ │ │ ├── TestTUILooks.test_basic.svg │ │ │ ├── TestTUILooks.test_basic_node_selected_leaf.svg │ │ │ ├── TestTUILooks.test_basic_node_selected_not_leaf.svg │ │ │ ├── TestTUILooks.test_biggest_allocations.svg │ │ │ ├── TestTUILooks.test_hide_import_system.svg │ │ │ ├── TestTUILooks.test_select_screen.svg │ │ │ ├── TestTUILooks.test_show_uninteresting.svg │ │ │ ├── TestTUILooks.test_show_uninteresting_and_hide_import_system.svg │ │ │ ├── TestTUILooks.test_two_chains.svg │ │ │ └── TestTUILooks.test_two_chains_after_expanding_second.svg │ │ └── test_tui_reporter │ │ │ ├── test_merge_threads.svg │ │ │ ├── test_tui_basic[narrow-terminal-focus-header-long-snapshots].svg │ │ │ ├── test_tui_basic[narrow-terminal-short-snapshots].svg │ │ │ ├── test_tui_basic[very-wide-terminal-short-snapshots].svg │ │ │ ├── test_tui_basic[wide-terminal-long-snapshots].svg │ │ │ ├── test_tui_gradient.svg │ │ │ ├── test_tui_pause[narrow-terminal-connected].svg │ │ │ ├── test_tui_pause[narrow-terminal-disconnected].svg │ │ │ ├── test_tui_pause[wider-terminal].svg │ │ │ └── test_unmerge_threads.svg │ ├── conftest.py │ ├── test_allocation_lifetime_aggregator.py │ ├── test_attach.py │ ├── test_cli.py │ ├── test_flamegraph_reporter.py │ ├── test_frame_tools.py │ ├── test_high_water_mark_aggregator.py │ ├── test_highwatermark_command.py │ ├── test_reader.py │ ├── test_stats_reporter.py │ ├── test_summary_reporter.py │ ├── test_table_reporter.py │ ├── test_templates.py │ ├── test_tracker.py │ ├── test_transform_reporter.py │ ├── test_tree_reporter.py │ └── test_tui_reporter.py └── utils.py ├── valgrind.supp └── webpack.config.js /.babelrc: -------------------------------------------------------------------------------- 1 | { 2 | "presets": ["@babel/preset-env"] 3 | } 4 | -------------------------------------------------------------------------------- /.bumpversion.cfg: -------------------------------------------------------------------------------- 1 | [bumpversion] 2 | current_version = 1.17.2 3 | commit = True 4 | message = 5 | Prepare for {new_version} release 6 | 7 | See changelog for more details. 8 | 9 | [bumpversion:file:src/memray/_version.py] 10 | -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Memray development", 3 | "build": { 4 | "context": "..", 5 | "dockerfile": "../Dockerfile" 6 | }, 7 | "runArgs": ["--cap-add=SYS_PTRACE", "--security-opt", "seccomp=unconfined"], 8 | "onCreateCommand": "pip install -e ." 
9 | } 10 | -------------------------------------------------------------------------------- /.devcontainer/tutorials/devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Memray tutorials", 3 | "build": { 4 | "context": "../../docs/tutorials", 5 | "dockerfile": "../../docs/tutorials/Dockerfile" 6 | }, 7 | "customizations": { 8 | "vscode": { 9 | "settings": { 10 | "python.testing.pytestArgs": ["docs/tutorials/tests"], 11 | "python.testing.unittestEnabled": false, 12 | "python.testing.pytestEnabled": true, 13 | "python.defaultInterpreterPath": "/venv/bin/python" 14 | }, 15 | "extensions": ["ms-python.python"] 16 | } 17 | }, 18 | "runArgs": ["--cap-add=SYS_PTRACE", "--security-opt", "seccomp=unconfined"] 19 | } 20 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/---bug-report.yaml: -------------------------------------------------------------------------------- 1 | name: 🐞 Bug Report 2 | description: If something isn't working as expected 3 | labels: [bug] 4 | body: 5 | - type: checkboxes 6 | attributes: 7 | label: Is there an existing issue for this? 8 | description: Please search to see if an issue already exists for the bug you encountered. 9 | options: 10 | - label: I have searched the existing issues 11 | required: true 12 | - type: textarea 13 | attributes: 14 | label: Current Behavior 15 | description: A concise description of what you're experiencing. 16 | validations: 17 | required: false 18 | - type: textarea 19 | attributes: 20 | label: Expected Behavior 21 | description: A concise description of what you expected to happen. 22 | validations: 23 | required: false 24 | - type: textarea 25 | attributes: 26 | label: Steps To Reproduce 27 | description: Steps to reproduce the behavior. 28 | placeholder: | 29 | 1. In this environment... 30 | 2. With this config... 31 | 3. Run '...' 32 | 4. See error... 33 | validations: 34 | required: True 35 | - type: input 36 | id: memray_version 37 | attributes: 38 | label: Memray Version 39 | description: What version of Memray are you seeing the problem on? 40 | placeholder: 1.3.0 41 | validations: 42 | required: true 43 | - type: dropdown 44 | id: python_version 45 | attributes: 46 | label: Python Version 47 | description: What version of Python are you running? 48 | multiple: true 49 | options: 50 | - "3.7" 51 | - "3.8" 52 | - "3.9" 53 | - "3.10" 54 | - "3.11" 55 | - "3.12" 56 | - "3.13" 57 | validations: 58 | required: true 59 | - type: dropdown 60 | id: os 61 | attributes: 62 | label: Operating System 63 | description: What OS are you seeing the problem on? 64 | multiple: true 65 | options: 66 | - macOS 67 | - Linux 68 | validations: 69 | required: true 70 | - type: textarea 71 | attributes: 72 | label: Anything else? 73 | description: | 74 | Links? References? Anything that will give us more context about the issue you are encountering! 75 | 76 | Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in. 77 | validations: 78 | required: false 79 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/---feature-request.yaml: -------------------------------------------------------------------------------- 1 | name: 🚀 Feature Request 2 | description: Suggest an idea for this project 3 | labels: [enhancement] 4 | 5 | body: 6 | - type: checkboxes 7 | attributes: 8 | label: Is there an existing proposal for this? 
9 | description: Please search to see if a proposal already exists for this feature. 10 | options: 11 | - label: I have searched the existing proposals 12 | required: true 13 | - type: textarea 14 | attributes: 15 | label: Is your feature request related to a problem? 16 | description: A clear and concise description of what the problem is 17 | placeholder: | 18 | I have an issue when [...] 19 | validations: 20 | required: True 21 | - type: textarea 22 | attributes: 23 | label: Describe the solution you'd like 24 | description: A clear and concise description of what you want to happen. Add any considered drawbacks. 25 | placeholder: | 26 | I would love if Memray could [...] 27 | validations: 28 | required: True 29 | - type: textarea 30 | attributes: 31 | label: Alternatives you considered 32 | description: A clear and concise description of any alternative solutions or features you've considered. 33 | validations: 34 | required: False 35 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: true 2 | contact_links: 3 | - name: Long question or idea 4 | url: https://github.com/bloomberg/memray/discussions 5 | about: Ask long-form questions and discuss ideas. 6 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "daily" 7 | -------------------------------------------------------------------------------- /.github/workflows/coverage.yml: -------------------------------------------------------------------------------- 1 | name: Coverage 2 | 3 | permissions: 4 | pull-requests: write 5 | 6 | on: 7 | push: 8 | branches: 9 | - main 10 | pull_request: 11 | branches: 12 | - main 13 | release: 14 | types: 15 | - published 16 | schedule: 17 | # At 12:00 on every day-of-month 18 | - cron: "0 12 */1 * *" 19 | 20 | concurrency: 21 | group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} 22 | cancel-in-progress: true 23 | 24 | jobs: 25 | coverage: 26 | runs-on: ubuntu-22.04 27 | strategy: 28 | fail-fast: false 29 | steps: 30 | - uses: actions/checkout@v4 31 | - name: Set up Python 32 | uses: actions/setup-python@v5 33 | with: 34 | python-version: "3.10" 35 | - name: Set up dependencies 36 | run: | 37 | sudo apt-get update 38 | sudo apt-get install -qy \ 39 | pkg-config \ 40 | libdebuginfod-dev \ 41 | libunwind-dev \ 42 | liblz4-dev \ 43 | gdb \ 44 | lcov \ 45 | libdw-dev \ 46 | libelf-dev \ 47 | python3.10-dev \ 48 | python3.10-dbg 49 | - name: Install Python dependencies 50 | run: | 51 | python3 -m pip install --upgrade pip cython pkgconfig 52 | make test-install 53 | - name: Disable ptrace security restrictions 54 | run: | 55 | echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope 56 | - name: Add ./node_modules/.bin to PATH 57 | run: | 58 | export PATH="./node_modules/.bin:$PATH" 59 | - name: Compute C++ coverage 60 | run: | 61 | make ccoverage 62 | - name: Compute Python + Cython coverage 63 | run: | 64 | make pycoverage 65 | - name: Upload C++ report to Codecov 66 | uses: codecov/codecov-action@v5 67 | with: 68 | token: ${{ secrets.CODECOV_TOKEN }} 69 | files: cppcoverage.lcov 70 | flags: cpp 71 | - name: Upload {P,C}ython report to Codecov 72 | uses: 
codecov/codecov-action@v5 73 | with: 74 | token: ${{ secrets.CODECOV_TOKEN }} 75 | files: pycoverage.lcov 76 | flags: python_and_cython 77 | -------------------------------------------------------------------------------- /.github/workflows/docs.yml: -------------------------------------------------------------------------------- 1 | name: Docs 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | 8 | jobs: 9 | publish_docs: 10 | name: Publish docs 11 | runs-on: ubuntu-latest 12 | #if: github.event_name == 'release' && github.event.action == 'published' 13 | permissions: 14 | contents: write 15 | steps: 16 | - uses: actions/checkout@v4 17 | - name: Set up Python 18 | uses: actions/setup-python@v5 19 | with: 20 | python-version: "3.10" 21 | - name: Set up dependencies 22 | run: | 23 | sudo apt-get update 24 | sudo apt-get install --no-install-recommends -qy libdebuginfod-dev libunwind-dev liblz4-dev pkg-config 25 | - name: Install Python dependencies 26 | run: | 27 | python3 -m pip install -r requirements-extra.txt 28 | - name: Install Package 29 | run: | 30 | python3 -m pip install -e . 31 | - name: Build docs 32 | run: | 33 | make docs 34 | - name: Publish docs to GitHub Pages 35 | uses: JamesIves/github-pages-deploy-action@v4 36 | with: 37 | folder: docs/_build/html 38 | single-commit: true 39 | -------------------------------------------------------------------------------- /.github/workflows/news-check.yml: -------------------------------------------------------------------------------- 1 | name: News entry check 2 | on: 3 | pull_request: 4 | paths: 5 | - "src/memray/**" 6 | types: 7 | - "opened" 8 | - "reopened" 9 | - "synchronize" 10 | - "labeled" 11 | - "unlabeled" 12 | 13 | jobs: 14 | news_entry_check: 15 | runs-on: ubuntu-latest 16 | name: Check for news entry 17 | steps: 18 | - name: "Check for news entry" 19 | uses: brettcannon/check-for-changed-files@v1 20 | with: 21 | file-pattern: | 22 | news/*.rst 23 | NEWS.rst 24 | skip-label: "skip news" 25 | failure-message: "Missing a news file in ${file-pattern}; please add one or apply the ${skip-label} label to the pull request" 26 | -------------------------------------------------------------------------------- /.github/workflows/sanity-check.yml: -------------------------------------------------------------------------------- 1 | name: Sanity check 2 | on: 3 | pull_request: 4 | types: 5 | - "opened" 6 | - "reopened" 7 | - "synchronize" 8 | - "labeled" 9 | - "unlabeled" 10 | 11 | jobs: 12 | commits_check_job: 13 | runs-on: ubuntu-latest 14 | name: Commits Check 15 | steps: 16 | - name: Get PR Commits 17 | id: "get-pr-commits" 18 | uses: tim-actions/get-pr-commits@master 19 | with: 20 | token: ${{ secrets.GITHUB_TOKEN }} 21 | - name: DCO Check 22 | uses: tim-actions/dco@master 23 | with: 24 | commits: ${{ steps.get-pr-commits.outputs.commits }} 25 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | exclude: "^(src/memray/reporters/templates/assets|src/vendor|benchmarks|docs/_static/flamegraphs)/" 2 | repos: 3 | - repo: https://github.com/pre-commit/pre-commit-hooks 4 | rev: v4.5.0 5 | hooks: 6 | - id: check-added-large-files 7 | - id: check-json 8 | exclude: "^asv\\.conf\\.json$" 9 | - id: check-merge-conflict 10 | - id: check-toml 11 | - id: check-yaml 12 | - id: end-of-file-fixer 13 | - id: trailing-whitespace 14 | exclude: "^([.]bumpversion[.]cfg|.*/__snapshots__/)" 15 | 16 | - repo: 
https://github.com/pre-commit/pygrep-hooks 17 | rev: v1.10.0 18 | hooks: 19 | - id: rst-directive-colons 20 | - id: rst-inline-touching-normal 21 | 22 | - repo: https://github.com/charliermarsh/ruff-pre-commit 23 | rev: "v0.1.7" 24 | hooks: 25 | - id: ruff 26 | args: [--fix, --exit-non-zero-on-fix] 27 | 28 | - repo: https://github.com/psf/black 29 | rev: 23.12.0 30 | hooks: 31 | - id: black 32 | 33 | - repo: https://github.com/pycqa/isort 34 | rev: 5.13.2 35 | hooks: 36 | - id: isort 37 | exclude_types: [python] 38 | 39 | - repo: https://github.com/sphinx-contrib/sphinx-lint 40 | rev: v0.9.1 41 | hooks: 42 | - id: sphinx-lint 43 | 44 | - repo: https://github.com/pre-commit/mirrors-prettier 45 | rev: v3.1.0 46 | hooks: 47 | - id: prettier 48 | args: [--no-editorconfig] 49 | exclude: "^asv\\.conf\\.json$" 50 | exclude_types: [html] 51 | 52 | - repo: https://github.com/pre-commit/mirrors-clang-format 53 | rev: v17.0.6 54 | hooks: 55 | - id: clang-format 56 | args: [--Werror, -i] 57 | exclude: "^tests/integration/" 58 | types_or: [c++, c, cuda] 59 | 60 | - repo: https://github.com/rstcheck/rstcheck 61 | rev: v6.2.0 62 | hooks: 63 | - id: rstcheck 64 | files: ^news/ 65 | types: [rst] 66 | additional_dependencies: ["sphinx"] 67 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM debian:bookworm-slim 2 | 3 | ARG DEBIAN_FRONTEND=noninteractive 4 | 5 | RUN apt-get update \ 6 | && apt-get install -y --force-yes --no-install-recommends \ 7 | build-essential \ 8 | libdebuginfod-dev \ 9 | libunwind-dev \ 10 | liblz4-dev \ 11 | pkg-config \ 12 | python3-dev \ 13 | python3-dbg \ 14 | python3-pip \ 15 | python3-venv \ 16 | make \ 17 | cmake \ 18 | gdb \ 19 | valgrind \ 20 | lcov \ 21 | nodejs \ 22 | npm \ 23 | clang-format \ 24 | git \ 25 | ccache \ 26 | wget \ 27 | && apt-get clean \ 28 | && rm -rf /var/lib/apt/lists/* 29 | 30 | ENV VIRTUAL_ENV=/venv \ 31 | PYTHONDONTWRITEBYTECODE=1 \ 32 | PATH=/bin:$PATH \ 33 | CC=gcc \ 34 | CXX=g++ 35 | 36 | RUN python3 -m venv "$VIRTUAL_ENV" 37 | 38 | ENV PATH="${VIRTUAL_ENV}/bin:/usr/lib/ccache:${PATH}" \ 39 | PYTHON="${VIRTUAL_ENV}/bin/python" \ 40 | MEMRAY_MINIMIZE_INLINING="1" 41 | 42 | COPY requirements-test.txt requirements-extra.txt requirements-docs.txt /tmp/ 43 | 44 | RUN $PYTHON -m pip install -U \ 45 | -r /tmp/requirements-extra.txt \ 46 | -r /tmp/requirements-test.txt \ 47 | -r /tmp/requirements-docs.txt \ 48 | cython \ 49 | pkgconfig \ 50 | setuptools \ 51 | wheel 52 | 53 | RUN npm install -g prettier 54 | 55 | WORKDIR /src 56 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | exclude .clang-format 2 | exclude asv.conf.json 3 | exclude CONTRIBUTING.md 4 | exclude Dockerfile 5 | exclude Jenkinsfile 6 | exclude requirements-*.txt 7 | exclude .medusarc 8 | exclude valgrind.supp 9 | 10 | recursive-exclude src/vendor/libbacktrace/install * 11 | recursive-exclude benchmarks * 12 | recursive-exclude debian * 13 | recursive-exclude docker * 14 | recursive-exclude docs * 15 | recursive-exclude src/memray *.cpp *.h 16 | recursive-exclude src/memray *.md 17 | recursive-exclude tests * 18 | recursive-exclude news * 19 | recursive-exclude vendor * 20 | 21 | include README.md 22 | include Makefile 23 | include pyproject.toml 24 | include package.json 25 | include package-lock.json 26 | include 
.bumpversion.cfg 27 | include .babelrc 28 | include webpack.config.js 29 | include NEWS.rst 30 | include .flake8 31 | include src/memray/py.typed 32 | include .pre-commit-config.yaml 33 | 34 | recursive-include src/vendor * 35 | recursive-include src/memray *.py 36 | recursive-include src/memray *.pyi 37 | recursive-include src/memray *.html *.js *.css 38 | recursive-include src/memray *.pyx *.pxd 39 | recursive-include src/memray *.gdb *.lldb 40 | recursive-include src/memray/_memray * 41 | recursive-include tools *.sh 42 | -------------------------------------------------------------------------------- /benchmarks/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/benchmarks/__init__.py -------------------------------------------------------------------------------- /benchmarks/benchmarking/cases/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/benchmarks/benchmarking/cases/__init__.py -------------------------------------------------------------------------------- /benchmarks/benchmarking/cases/docutils_html_base.py: -------------------------------------------------------------------------------- 1 | """ 2 | Convert Docutils' documentation from reStructuredText to HTML. 3 | """ 4 | 5 | import contextlib 6 | from pathlib import Path 7 | import time 8 | 9 | import docutils 10 | from docutils import core 11 | import pyperf 12 | import memray 13 | import contextlib 14 | 15 | try: 16 | from docutils.utils.math.math2html import Trace 17 | except ImportError: 18 | pass 19 | else: 20 | Trace.show = lambda message, channel: ... # don't print to console 21 | 22 | DOC_ROOT = (Path(__file__).parent / "docutils_data" / "docs").resolve() 23 | 24 | 25 | def build_html(doc_root): 26 | elapsed = 0 27 | for file in doc_root.rglob("*.txt"): 28 | file_contents = file.read_text(encoding="utf-8") 29 | t0 = pyperf.perf_counter() 30 | with contextlib.nullcontext(): 31 | with contextlib.suppress(docutils.ApplicationError): 32 | core.publish_string( 33 | source=file_contents, 34 | reader_name="standalone", 35 | parser_name="restructuredtext", 36 | writer_name="html5", 37 | settings_overrides={ 38 | "input_encoding": "unicode", 39 | "output_encoding": "unicode", 40 | "report_level": 5, 41 | }, 42 | ) 43 | elapsed += pyperf.perf_counter() - t0 44 | return elapsed 45 | 46 | 47 | def bench_docutils(loops, doc_root): 48 | runs_total = 0 49 | for _ in range(loops): 50 | runs_total += build_html(doc_root) 51 | return runs_total 52 | 53 | 54 | def run_benchmark(): 55 | bench_docutils(1, DOC_ROOT) 56 | 57 | 58 | if __name__ == "__main__": 59 | run_benchmark() 60 | -------------------------------------------------------------------------------- /benchmarks/benchmarking/cases/docutils_html_memray.py: -------------------------------------------------------------------------------- 1 | """ 2 | Convert Docutils' documentation from reStructuredText to HTML. 3 | """ 4 | 5 | import contextlib 6 | from pathlib import Path 7 | 8 | import docutils 9 | from docutils import core 10 | import pyperf 11 | from memray_helper import get_tracker 12 | 13 | from docutils.utils.math.math2html import Trace 14 | 15 | Trace.show = lambda message, channel: ...
# don't print to console 16 | 17 | DOC_ROOT = (Path(__file__).parent / "docutils_data" / "docs").resolve() 18 | 19 | 20 | def build_html(doc_root): 21 | elapsed = 0.0 22 | for file in doc_root.rglob("*.txt"): 23 | file_contents = file.read_text(encoding="utf-8") 24 | t0 = pyperf.perf_counter() 25 | with get_tracker(): 26 | with contextlib.suppress(docutils.ApplicationError): 27 | core.publish_string( 28 | source=file_contents, 29 | reader_name="standalone", 30 | parser_name="restructuredtext", 31 | writer_name="html5", 32 | settings_overrides={ 33 | "input_encoding": "unicode", 34 | "output_encoding": "unicode", 35 | "report_level": 5, 36 | }, 37 | ) 38 | elapsed += pyperf.perf_counter() - t0 39 | return elapsed 40 | 41 | 42 | def bench_docutils(loops, doc_root): 43 | runs_total = 0 44 | for _ in range(loops): 45 | runs_total += build_html(doc_root) 46 | return runs_total 47 | 48 | 49 | def add_cmdline_args(cmd, args): 50 | cmd.append("--doc_root=%s" % args.doc_root) 51 | 52 | 53 | if __name__ == "__main__": 54 | runner = pyperf.Runner(add_cmdline_args=add_cmdline_args) 55 | runner.metadata["description"] = "Render documentation with Docutils" 56 | runner.argparser.add_argument("--doc_root", default=DOC_ROOT) 57 | 58 | args = runner.parse_args() 59 | runner.bench_time_func("docutils", bench_docutils, Path(args.doc_root)) 60 | -------------------------------------------------------------------------------- /benchmarks/benchmarking/cases/fannkuch_base.py: -------------------------------------------------------------------------------- 1 | """ 2 | The Computer Language Benchmarks Game 3 | http://benchmarksgame.alioth.debian.org/ 4 | 5 | Contributed by Sokolov Yura, modified by Tupteq. 6 | """ 7 | 8 | DEFAULT_ARG = 9 9 | 10 | 11 | def fannkuch(n): 12 | count = list(range(1, n + 1)) 13 | max_flips = 0 14 | m = n - 1 15 | r = n 16 | perm1 = list(range(n)) 17 | perm = list(range(n)) 18 | perm1_ins = perm1.insert 19 | perm1_pop = perm1.pop 20 | 21 | while 1: 22 | while r != 1: 23 | count[r - 1] = r 24 | r -= 1 25 | 26 | if perm1[0] != 0 and perm1[m] != m: 27 | perm = perm1[:] 28 | flips_count = 0 29 | k = perm[0] 30 | while k: 31 | perm[: k + 1] = perm[k::-1] 32 | flips_count += 1 33 | k = perm[0] 34 | 35 | if flips_count > max_flips: 36 | max_flips = flips_count 37 | 38 | while r != n: 39 | perm1_ins(r, perm1_pop(0)) 40 | count[r] -= 1 41 | if count[r] > 0: 42 | break 43 | r += 1 44 | else: 45 | return max_flips 46 | 47 | 48 | def run_benchmark(): 49 | arg = DEFAULT_ARG 50 | fannkuch(arg) 51 | 52 | 53 | if __name__ == "__main__": 54 | run_benchmark() 55 | -------------------------------------------------------------------------------- /benchmarks/benchmarking/cases/fannkuch_memray.py: -------------------------------------------------------------------------------- 1 | """ 2 | The Computer Language Benchmarks Game 3 | http://benchmarksgame.alioth.debian.org/ 4 | 5 | Contributed by Sokolov Yura, modified by Tupteq. 
6 | """ 7 | 8 | import pyperf 9 | from memray_helper import get_tracker 10 | 11 | 12 | DEFAULT_ARG = 9 13 | 14 | 15 | def fannkuch(n): 16 | with get_tracker(): 17 | return _fannkuch(n) 18 | 19 | 20 | def _fannkuch(n): 21 | count = list(range(1, n + 1)) 22 | max_flips = 0 23 | m = n - 1 24 | r = n 25 | perm1 = list(range(n)) 26 | perm = list(range(n)) 27 | perm1_ins = perm1.insert 28 | perm1_pop = perm1.pop 29 | 30 | while 1: 31 | while r != 1: 32 | count[r - 1] = r 33 | r -= 1 34 | 35 | if perm1[0] != 0 and perm1[m] != m: 36 | perm = perm1[:] 37 | flips_count = 0 38 | k = perm[0] 39 | while k: 40 | perm[: k + 1] = perm[k::-1] 41 | flips_count += 1 42 | k = perm[0] 43 | 44 | if flips_count > max_flips: 45 | max_flips = flips_count 46 | 47 | while r != n: 48 | perm1_ins(r, perm1_pop(0)) 49 | count[r] -= 1 50 | if count[r] > 0: 51 | break 52 | r += 1 53 | else: 54 | return max_flips 55 | 56 | 57 | if __name__ == "__main__": 58 | runner = pyperf.Runner() 59 | arg = DEFAULT_ARG 60 | runner.bench_func("fannkuch", fannkuch, arg) 61 | -------------------------------------------------------------------------------- /benchmarks/benchmarking/cases/json_dumps_base.py: -------------------------------------------------------------------------------- 1 | import json 2 | import sys 3 | 4 | 5 | EMPTY = ({}, 2000) 6 | SIMPLE_DATA = { 7 | "key1": 0, 8 | "key2": True, 9 | "key3": "value", 10 | "key4": "foo", 11 | "key5": "string", 12 | } 13 | SIMPLE = (SIMPLE_DATA, 1000) 14 | NESTED_DATA = { 15 | "key1": 0, 16 | "key2": SIMPLE[0], 17 | "key3": "value", 18 | "key4": SIMPLE[0], 19 | "key5": SIMPLE[0], 20 | "key": "\u0105\u0107\u017c", 21 | } 22 | NESTED = (NESTED_DATA, 1000) 23 | HUGE = ([NESTED[0]] * 1000, 1) 24 | 25 | CASES = ["EMPTY", "SIMPLE", "NESTED", "HUGE"] 26 | 27 | 28 | def bench_json_dumps(data): 29 | for obj, count_it in data: 30 | for _ in count_it: 31 | json.dumps(obj) 32 | 33 | 34 | def add_cmdline_args(cmd, args): 35 | if args.cases: 36 | cmd.extend(("--cases", args.cases)) 37 | 38 | 39 | def run_benchmark(): 40 | data = [] 41 | for case in CASES: 42 | obj, count = globals()[case] 43 | data.append((obj, range(count))) 44 | bench_json_dumps(data) 45 | 46 | 47 | def main(): 48 | run_benchmark() 49 | -------------------------------------------------------------------------------- /benchmarks/benchmarking/cases/json_dumps_memray.py: -------------------------------------------------------------------------------- 1 | import json 2 | import sys 3 | 4 | import pyperf 5 | from memray_helper import get_tracker 6 | 7 | 8 | EMPTY = ({}, 2000) 9 | SIMPLE_DATA = { 10 | "key1": 0, 11 | "key2": True, 12 | "key3": "value", 13 | "key4": "foo", 14 | "key5": "string", 15 | } 16 | SIMPLE = (SIMPLE_DATA, 1000) 17 | NESTED_DATA = { 18 | "key1": 0, 19 | "key2": SIMPLE[0], 20 | "key3": "value", 21 | "key4": SIMPLE[0], 22 | "key5": SIMPLE[0], 23 | "key": "\u0105\u0107\u017c", 24 | } 25 | NESTED = (NESTED_DATA, 1000) 26 | HUGE = ([NESTED[0]] * 1000, 1) 27 | 28 | CASES = ["EMPTY", "SIMPLE", "NESTED", "HUGE"] 29 | 30 | 31 | def bench_json_dumps(data): 32 | with get_tracker(): 33 | for obj, count_it in data: 34 | for _ in count_it: 35 | json.dumps(obj) 36 | 37 | 38 | def add_cmdline_args(cmd, args): 39 | if args.cases: 40 | cmd.extend(("--cases", args.cases)) 41 | 42 | 43 | def main(): 44 | runner = pyperf.Runner(add_cmdline_args=add_cmdline_args) 45 | runner.argparser.add_argument( 46 | "--cases", 47 | help="Comma separated list of cases. Available cases: %s. By default, run all cases." 
48 | % ", ".join(CASES), 49 | ) 50 | runner.metadata["description"] = "Benchmark json.dumps()" 51 | 52 | args = runner.parse_args() 53 | if args.cases: 54 | cases = [] 55 | for case in args.cases.split(","): 56 | case = case.strip() 57 | if case: 58 | cases.append(case) 59 | if not cases: 60 | print("ERROR: empty list of cases") 61 | sys.exit(1) 62 | else: 63 | cases = CASES 64 | 65 | data = [] 66 | for case in cases: 67 | obj, count = globals()[case] 68 | data.append((obj, range(count))) 69 | 70 | runner.bench_func("json_dumps", bench_json_dumps, data) 71 | 72 | 73 | if __name__ == "__main__": 74 | main() 75 | -------------------------------------------------------------------------------- /benchmarks/benchmarking/cases/nqueens_base.py: -------------------------------------------------------------------------------- 1 | """Simple, brute-force N-Queens solver.""" 2 | 3 | __author__ = "collinwinter@google.com (Collin Winter)" 4 | 5 | 6 | # Pure-Python implementation of itertools.permutations(). 7 | def permutations(iterable, r=None): 8 | """permutations(range(3), 2) --> (0,1) (0,2) (1,0) (1,2) (2,0) (2,1)""" 9 | pool = tuple(iterable) 10 | n = len(pool) 11 | if r is None: 12 | r = n 13 | indices = list(range(n)) 14 | cycles = list(range(n - r + 1, n + 1))[::-1] 15 | yield tuple(pool[i] for i in indices[:r]) 16 | while n: 17 | for i in reversed(range(r)): 18 | cycles[i] -= 1 19 | if cycles[i] == 0: 20 | indices[i:] = indices[i + 1 :] + indices[i : i + 1] 21 | cycles[i] = n - i 22 | else: 23 | j = cycles[i] 24 | indices[i], indices[-j] = indices[-j], indices[i] 25 | yield tuple(pool[i] for i in indices[:r]) 26 | break 27 | else: 28 | return 29 | 30 | 31 | # From http://code.activestate.com/recipes/576647/ 32 | def n_queens(queen_count): 33 | """N-Queens solver. 34 | 35 | Args: 36 | queen_count: the number of queens to solve for. This is also the 37 | board size. 38 | 39 | Yields: 40 | Solutions to the problem. Each yielded value is looks like 41 | (3, 8, 2, 1, 4, ..., 6) where each number is the column position for the 42 | queen, and the index into the tuple indicates the row. 43 | """ 44 | cols = range(queen_count) 45 | for vec in permutations(cols): 46 | if ( 47 | queen_count 48 | == len(set(vec[i] + i for i in cols)) 49 | == len(set(vec[i] - i for i in cols)) 50 | ): 51 | yield vec 52 | 53 | 54 | def bench_n_queens(queen_count): 55 | list(n_queens(queen_count)) 56 | 57 | 58 | def run_benchmark(): 59 | bench_n_queens(8) 60 | 61 | 62 | if __name__ == "__main__": 63 | run_benchmark() 64 | -------------------------------------------------------------------------------- /benchmarks/benchmarking/cases/nqueens_memray.py: -------------------------------------------------------------------------------- 1 | """Simple, brute-force N-Queens solver.""" 2 | 3 | import pyperf 4 | from memray_helper import get_tracker 5 | 6 | __author__ = "collinwinter@google.com (Collin Winter)" 7 | 8 | 9 | # Pure-Python implementation of itertools.permutations(). 
10 | def permutations(iterable, r=None): 11 | """permutations(range(3), 2) --> (0,1) (0,2) (1,0) (1,2) (2,0) (2,1)""" 12 | pool = tuple(iterable) 13 | n = len(pool) 14 | if r is None: 15 | r = n 16 | indices = list(range(n)) 17 | cycles = list(range(n - r + 1, n + 1))[::-1] 18 | yield tuple(pool[i] for i in indices[:r]) 19 | while n: 20 | for i in reversed(range(r)): 21 | cycles[i] -= 1 22 | if cycles[i] == 0: 23 | indices[i:] = indices[i + 1 :] + indices[i : i + 1] 24 | cycles[i] = n - i 25 | else: 26 | j = cycles[i] 27 | indices[i], indices[-j] = indices[-j], indices[i] 28 | yield tuple(pool[i] for i in indices[:r]) 29 | break 30 | else: 31 | return 32 | 33 | 34 | # From http://code.activestate.com/recipes/576647/ 35 | def n_queens(queen_count): 36 | """N-Queens solver. 37 | 38 | Args: 39 | queen_count: the number of queens to solve for. This is also the 40 | board size. 41 | 42 | Yields: 43 | Solutions to the problem. Each yielded value is looks like 44 | (3, 8, 2, 1, 4, ..., 6) where each number is the column position for the 45 | queen, and the index into the tuple indicates the row. 46 | """ 47 | cols = range(queen_count) 48 | for vec in permutations(cols): 49 | if ( 50 | queen_count 51 | == len(set(vec[i] + i for i in cols)) 52 | == len(set(vec[i] - i for i in cols)) 53 | ): 54 | yield vec 55 | 56 | 57 | def bench_n_queens(queen_count): 58 | with get_tracker(): 59 | list(n_queens(queen_count)) 60 | 61 | 62 | if __name__ == "__main__": 63 | runner = pyperf.Runner() 64 | runner.metadata["description"] = "Simple, brute-force N-Queens solver" 65 | 66 | queen_count = 8 67 | runner.bench_func("nqueens", bench_n_queens, queen_count) 68 | -------------------------------------------------------------------------------- /benchmarks/benchmarking/cases/pprint_format_base.py: -------------------------------------------------------------------------------- 1 | """Test the performance of pprint.PrettyPrinter. 2 | 3 | This benchmark was available as `python -m pprint` until Python 3.12. 4 | 5 | Authors: Fred Drake (original), Oleg Iarygin (pyperformance port). 6 | """ 7 | 8 | from pprint import PrettyPrinter 9 | 10 | printable = [("string", (1, 2), [3, 4], {5: 6, 7: 8})] * 100_000 11 | p = PrettyPrinter() 12 | 13 | 14 | def run_benchmark(): 15 | if hasattr(p, "_safe_repr"): 16 | p._safe_repr(printable, {}, None, 0) 17 | p.pformat(printable) 18 | 19 | 20 | if __name__ == "__main__": 21 | run_benchmark() 22 | -------------------------------------------------------------------------------- /benchmarks/benchmarking/cases/pprint_format_memray.py: -------------------------------------------------------------------------------- 1 | """Test the performance of pprint.PrettyPrinter. 2 | 3 | This benchmark was available as `python -m pprint` until Python 3.12. 4 | 5 | Authors: Fred Drake (original), Oleg Iarygin (pyperformance port). 
6 | """ 7 | 8 | import pyperf 9 | from pprint import PrettyPrinter 10 | from memray_helper import get_tracker 11 | 12 | 13 | printable = [("string", (1, 2), [3, 4], {5: 6, 7: 8})] * 100_000 14 | p = PrettyPrinter() 15 | 16 | 17 | def format(*args, **kwrags): 18 | with get_tracker(): 19 | p.pformat(*args, **kwrags) 20 | 21 | 22 | def safe_repr(*args, **kwargs): 23 | with get_tracker(): 24 | p._safe_repr(*args, **kwargs) 25 | 26 | 27 | if __name__ == "__main__": 28 | runner = pyperf.Runner() 29 | runner.metadata["description"] = "pprint benchmark" 30 | 31 | if hasattr(p, "_safe_repr"): 32 | runner.bench_func("pprint_safe_repr", safe_repr, printable, {}, None, 0) 33 | runner.bench_func("pprint_pformat", format, printable) 34 | -------------------------------------------------------------------------------- /benchmarks/benchmarking/cases/spectral_norm_base.py: -------------------------------------------------------------------------------- 1 | """ 2 | MathWorld: "Hundred-Dollar, Hundred-Digit Challenge Problems", Challenge #3. 3 | http://mathworld.wolfram.com/Hundred-DollarHundred-DigitChallengeProblems.html 4 | 5 | The Computer Language Benchmarks Game 6 | http://benchmarksgame.alioth.debian.org/u64q/spectralnorm-description.html#spectralnorm 7 | 8 | Contributed by Sebastien Loisel 9 | Fixed by Isaac Gouy 10 | Sped up by Josh Goldfoot 11 | Dirtily sped up by Simon Descarpentries 12 | Concurrency by Jason Stitt 13 | """ 14 | 15 | 16 | DEFAULT_N = 130 17 | 18 | 19 | def eval_A(i, j): 20 | return 1.0 / ((i + j) * (i + j + 1) // 2 + i + 1) 21 | 22 | 23 | def eval_times_u(func, u): 24 | return [func((i, u)) for i in range(len(list(u)))] 25 | 26 | 27 | def eval_AtA_times_u(u): 28 | return eval_times_u(part_At_times_u, eval_times_u(part_A_times_u, u)) 29 | 30 | 31 | def part_A_times_u(i_u): 32 | i, u = i_u 33 | partial_sum = 0 34 | for j, u_j in enumerate(u): 35 | partial_sum += eval_A(i, j) * u_j 36 | return partial_sum 37 | 38 | 39 | def part_At_times_u(i_u): 40 | i, u = i_u 41 | partial_sum = 0 42 | for j, u_j in enumerate(u): 43 | partial_sum += eval_A(j, i) * u_j 44 | return partial_sum 45 | 46 | 47 | def bench_spectral_norm(loops): 48 | range_it = range(loops) 49 | for _ in range_it: 50 | u = [1] * DEFAULT_N 51 | 52 | for dummy in range(10): 53 | v = eval_AtA_times_u(u) 54 | u = eval_AtA_times_u(v) 55 | 56 | vBv = vv = 0 57 | 58 | for ue, ve in zip(u, v): 59 | vBv += ue * ve 60 | vv += ve * ve 61 | 62 | 63 | def run_benchmark(): 64 | bench_spectral_norm(1) 65 | 66 | 67 | if __name__ == "__main__": 68 | run_benchmark() 69 | -------------------------------------------------------------------------------- /benchmarks/benchmarking/cases/spectral_norm_memray.py: -------------------------------------------------------------------------------- 1 | """ 2 | MathWorld: "Hundred-Dollar, Hundred-Digit Challenge Problems", Challenge #3. 
3 | http://mathworld.wolfram.com/Hundred-DollarHundred-DigitChallengeProblems.html 4 | 5 | The Computer Language Benchmarks Game 6 | http://benchmarksgame.alioth.debian.org/u64q/spectralnorm-description.html#spectralnorm 7 | 8 | Contributed by Sebastien Loisel 9 | Fixed by Isaac Gouy 10 | Sped up by Josh Goldfoot 11 | Dirtily sped up by Simon Descarpentries 12 | Concurrency by Jason Stitt 13 | """ 14 | 15 | import pyperf 16 | from memray_helper import get_tracker 17 | 18 | 19 | DEFAULT_N = 130 20 | 21 | 22 | def eval_A(i, j): 23 | return 1.0 / ((i + j) * (i + j + 1) // 2 + i + 1) 24 | 25 | 26 | def eval_times_u(func, u): 27 | return [func((i, u)) for i in range(len(list(u)))] 28 | 29 | 30 | def eval_AtA_times_u(u): 31 | return eval_times_u(part_At_times_u, eval_times_u(part_A_times_u, u)) 32 | 33 | 34 | def part_A_times_u(i_u): 35 | i, u = i_u 36 | partial_sum = 0 37 | for j, u_j in enumerate(u): 38 | partial_sum += eval_A(i, j) * u_j 39 | return partial_sum 40 | 41 | 42 | def part_At_times_u(i_u): 43 | i, u = i_u 44 | partial_sum = 0 45 | for j, u_j in enumerate(u): 46 | partial_sum += eval_A(j, i) * u_j 47 | return partial_sum 48 | 49 | 50 | def bench_spectral_norm(loops): 51 | range_it = range(loops) 52 | 53 | with get_tracker(): 54 | t0 = pyperf.perf_counter() 55 | for _ in range_it: 56 | u = [1] * DEFAULT_N 57 | 58 | for dummy in range(10): 59 | v = eval_AtA_times_u(u) 60 | u = eval_AtA_times_u(v) 61 | 62 | vBv = vv = 0 63 | 64 | for ue, ve in zip(u, v): 65 | vBv += ue * ve 66 | vv += ve * ve 67 | 68 | return pyperf.perf_counter() - t0 69 | 70 | 71 | if __name__ == "__main__": 72 | runner = pyperf.Runner() 73 | runner.metadata["description"] = ( 74 | 'MathWorld: "Hundred-Dollar, Hundred-Digit Challenge Problems", ' 75 | "Challenge #3." 76 | ) 77 | runner.bench_time_func("spectral_norm", bench_spectral_norm) 78 | -------------------------------------------------------------------------------- /benchmarks/benchmarking/cases/sqlite_synth_base.py: -------------------------------------------------------------------------------- 1 | """ 2 | SQLite benchmark. 3 | 4 | The goal of the benchmark is to test CFFI performance and going back and forth 5 | between SQLite and Python a lot. Therefore the queries themselves are really 6 | simple. 
7 | """ 8 | 9 | import sqlite3 10 | import math 11 | 12 | 13 | class AvgLength(object): 14 | def __init__(self): 15 | self.sum = 0 16 | self.count = 0 17 | 18 | def step(self, x): 19 | if x is not None: 20 | self.count += 1 21 | self.sum += len(x) 22 | 23 | def finalize(self): 24 | return self.sum / float(self.count) 25 | 26 | 27 | def bench_sqlite(loops): 28 | conn = sqlite3.connect(":memory:") 29 | conn.execute("create table cos (x, y, z);") 30 | for i in range(loops): 31 | cos_i = math.cos(i) 32 | conn.execute("insert into cos values (?, ?, ?)", [i, cos_i, str(i)]) 33 | 34 | conn.create_function("cos", 1, math.cos) 35 | for x, cosx1, cosx2 in conn.execute("select x, cos(x), y from cos"): 36 | assert math.cos(x) == cosx1 == cosx2 37 | 38 | conn.create_aggregate("avglength", 1, AvgLength) 39 | cursor = conn.execute("select avglength(z) from cos;") 40 | cursor.fetchone()[0] 41 | 42 | conn.execute("delete from cos;") 43 | conn.close() 44 | 45 | 46 | def run_benchmark(): 47 | bench_sqlite(1) 48 | 49 | 50 | if __name__ == "__main__": 51 | run_benchmark() 52 | -------------------------------------------------------------------------------- /benchmarks/benchmarking/cases/sqlite_synth_memray.py: -------------------------------------------------------------------------------- 1 | """ 2 | SQLite benchmark. 3 | 4 | The goal of the benchmark is to test CFFI performance and going back and forth 5 | between SQLite and Python a lot. Therefore the queries themselves are really 6 | simple. 7 | """ 8 | 9 | import sqlite3 10 | import math 11 | 12 | import pyperf 13 | from memray_helper import get_tracker 14 | 15 | 16 | class AvgLength(object): 17 | def __init__(self): 18 | self.sum = 0 19 | self.count = 0 20 | 21 | def step(self, x): 22 | if x is not None: 23 | self.count += 1 24 | self.sum += len(x) 25 | 26 | def finalize(self): 27 | return self.sum / float(self.count) 28 | 29 | 30 | def bench_sqlite(loops): 31 | 32 | with get_tracker(): 33 | t0 = pyperf.perf_counter() 34 | conn = sqlite3.connect(":memory:") 35 | conn.execute("create table cos (x, y, z);") 36 | for i in range(loops): 37 | cos_i = math.cos(i) 38 | conn.execute("insert into cos values (?, ?, ?)", [i, cos_i, str(i)]) 39 | 40 | conn.create_function("cos", 1, math.cos) 41 | for x, cosx1, cosx2 in conn.execute("select x, cos(x), y from cos"): 42 | assert math.cos(x) == cosx1 == cosx2 43 | 44 | conn.create_aggregate("avglength", 1, AvgLength) 45 | cursor = conn.execute("select avglength(z) from cos;") 46 | cursor.fetchone()[0] 47 | 48 | conn.execute("delete from cos;") 49 | conn.close() 50 | 51 | return pyperf.perf_counter() - t0 52 | 53 | 54 | if __name__ == "__main__": 55 | runner = pyperf.Runner() 56 | runner.metadata["description"] = "Benchmark Python aggregate for SQLite" 57 | runner.bench_time_func("sqlite_synth", bench_sqlite) 58 | -------------------------------------------------------------------------------- /benchmarks/benchmarking/cases/telco_data/telco-bench.b: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/benchmarks/benchmarking/cases/telco_data/telco-bench.b -------------------------------------------------------------------------------- /benchmarks/plot_defaut.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/benchmarks/plot_defaut.png 
-------------------------------------------------------------------------------- /benchmarks/plot_native.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/benchmarks/plot_native.png -------------------------------------------------------------------------------- /benchmarks/plot_python_allocators.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/benchmarks/plot_python_allocators.png -------------------------------------------------------------------------------- /benchmarks/plot_python_allocators_+_native.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/benchmarks/plot_python_allocators_+_native.png -------------------------------------------------------------------------------- /benchmarks/requirements.txt: -------------------------------------------------------------------------------- 1 | pyperf 2 | matplotlib 3 | seaborn 4 | numpy 5 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | SPHINXAUTOBUILD = sphinx-autobuild 5 | 6 | # You can set these variables from the command line. 7 | SPHINXOPTS = -n -W --keep-going 8 | SPHINXBUILD = sphinx-build 9 | SPHINXPROJ = memray 10 | SOURCEDIR = . 11 | BUILDDIR = _build 12 | 13 | # Internal variables. 14 | PAPEROPT_a4 = -D latex_paper_size=a4 15 | PAPEROPT_letter = -D latex_paper_size=letter 16 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 17 | 18 | .PHONY: clean 19 | clean: 20 | rm -rf $(BUILDDIR)/* 21 | 22 | .PHONY: html 23 | html: 24 | $(SPHINXBUILD) -W -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 25 | @echo 26 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 27 | 28 | .PHONY: man 29 | man: 30 | $(SPHINXBUILD) -b man -D exclude_patterns= $(ALLSPHINXOPTS) $(BUILDDIR)/man 31 | @echo 32 | @echo "Build finished. The man pages are in $(BUILDDIR)/man." 
33 | 34 | .PHONY: livehtml 35 | livehtml: 36 | $(SPHINXAUTOBUILD) -a $(ALLSPHINXOPTS) $(BUILDDIR)/html 37 | -------------------------------------------------------------------------------- /docs/_static/flamegraphs/.gitattributes: -------------------------------------------------------------------------------- 1 | *.html -diff 2 | -------------------------------------------------------------------------------- /docs/_static/images/circle_inner.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/circle_inner.png -------------------------------------------------------------------------------- /docs/_static/images/circle_inner_opt.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/circle_inner_opt.png -------------------------------------------------------------------------------- /docs/_static/images/circle_outer.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/circle_outer.png -------------------------------------------------------------------------------- /docs/_static/images/circle_outer_opt.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/circle_outer_opt.png -------------------------------------------------------------------------------- /docs/_static/images/codespaces_testing_tab.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/codespaces_testing_tab.png -------------------------------------------------------------------------------- /docs/_static/images/complex_example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/complex_example.png -------------------------------------------------------------------------------- /docs/_static/images/exercise1_flamegraph.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/exercise1_flamegraph.png -------------------------------------------------------------------------------- /docs/_static/images/exercise2_flamegraph.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/exercise2_flamegraph.png -------------------------------------------------------------------------------- /docs/_static/images/exercise3_flamegraph_basic.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/exercise3_flamegraph_basic.png -------------------------------------------------------------------------------- /docs/_static/images/exercise3_flamegraph_native.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/exercise3_flamegraph_native.png -------------------------------------------------------------------------------- /docs/_static/images/filter_thread_dropdown.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/filter_thread_dropdown.png -------------------------------------------------------------------------------- /docs/_static/images/flamegraph_example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/flamegraph_example.png -------------------------------------------------------------------------------- /docs/_static/images/gprof2dot_example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/gprof2dot_example.png -------------------------------------------------------------------------------- /docs/_static/images/icicle_flame_toggle.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/icicle_flame_toggle.png -------------------------------------------------------------------------------- /docs/_static/images/inverted_flame_graph_for_inverted_example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/inverted_flame_graph_for_inverted_example.png -------------------------------------------------------------------------------- /docs/_static/images/live_animated.webp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/live_animated.webp -------------------------------------------------------------------------------- /docs/_static/images/live_different_thread.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/live_different_thread.png -------------------------------------------------------------------------------- /docs/_static/images/live_disconnected.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/live_disconnected.png -------------------------------------------------------------------------------- /docs/_static/images/live_running.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/live_running.png -------------------------------------------------------------------------------- /docs/_static/images/logo.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/logo.png -------------------------------------------------------------------------------- /docs/_static/images/mandelbrot_operation_native.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/mandelbrot_operation_native.png -------------------------------------------------------------------------------- /docs/_static/images/mandelbrot_operation_non_native.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/mandelbrot_operation_non_native.png -------------------------------------------------------------------------------- /docs/_static/images/memray.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/memray.png -------------------------------------------------------------------------------- /docs/_static/images/native_mode_debug.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/native_mode_debug.png -------------------------------------------------------------------------------- /docs/_static/images/native_mode_no_debug.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/native_mode_no_debug.png -------------------------------------------------------------------------------- /docs/_static/images/non_relevant_checkbox.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/non_relevant_checkbox.png -------------------------------------------------------------------------------- /docs/_static/images/normal_flamegraph_for_inverted_example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/normal_flamegraph_for_inverted_example.png -------------------------------------------------------------------------------- /docs/_static/images/output.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/output.png -------------------------------------------------------------------------------- /docs/_static/images/plot_default_options.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/plot_default_options.png -------------------------------------------------------------------------------- /docs/_static/images/plot_defaut.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/plot_defaut.png -------------------------------------------------------------------------------- /docs/_static/images/plot_native.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/plot_native.png -------------------------------------------------------------------------------- /docs/_static/images/plot_python_allocators.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/plot_python_allocators.png -------------------------------------------------------------------------------- /docs/_static/images/plot_python_allocators_+_native.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/plot_python_allocators_+_native.png -------------------------------------------------------------------------------- /docs/_static/images/plot_python_allocators_native.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/plot_python_allocators_native.png -------------------------------------------------------------------------------- /docs/_static/images/ports_tab.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/ports_tab.png -------------------------------------------------------------------------------- /docs/_static/images/pymalloc.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/pymalloc.png -------------------------------------------------------------------------------- /docs/_static/images/pytest_cli_output.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/pytest_cli_output.png -------------------------------------------------------------------------------- /docs/_static/images/quotes.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/quotes.png -------------------------------------------------------------------------------- /docs/_static/images/rss_vs_heap.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/rss_vs_heap.png -------------------------------------------------------------------------------- /docs/_static/images/rss_vs_heap_no_free.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/rss_vs_heap_no_free.png 
-------------------------------------------------------------------------------- /docs/_static/images/simple_example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/simple_example.png -------------------------------------------------------------------------------- /docs/_static/images/stats_example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/stats_example.png -------------------------------------------------------------------------------- /docs/_static/images/summary_example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/summary_example.png -------------------------------------------------------------------------------- /docs/_static/images/table_example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/table_example.png -------------------------------------------------------------------------------- /docs/_static/images/temporal_high_water_mark_controls.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/temporal_high_water_mark_controls.png -------------------------------------------------------------------------------- /docs/_static/images/tree_example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/_static/images/tree_example.png -------------------------------------------------------------------------------- /docs/_static/js/custom.js: -------------------------------------------------------------------------------- 1 | function reveal() { 2 | var classes = [".reveal_l", ".reveal_r"]; 3 | classes.map(function (c) { 4 | var reveals = document.querySelectorAll(c); 5 | for (var i = 0; i < reveals.length; i++) { 6 | var windowHeight = window.innerHeight; 7 | var elementTop = reveals[i].getBoundingClientRect().top; 8 | var elementVisible = 150; 9 | if (elementTop < windowHeight - elementVisible) { 10 | reveals[i].classList.add("active"); 11 | } else { 12 | reveals[i].classList.remove("active"); 13 | } 14 | } 15 | }); 16 | } 17 | window.addEventListener("scroll", reveal); 18 | // To check the scroll position on page load 19 | reveal(); 20 | -------------------------------------------------------------------------------- /docs/changelog.rst: -------------------------------------------------------------------------------- 1 | ../NEWS.rst -------------------------------------------------------------------------------- /docs/examples/README.rst: -------------------------------------------------------------------------------- 1 | .. _example-applications: 2 | 3 | Example Applications for Memray 4 | ================================= 5 | 6 | The projects in the directories located here contain very simple 7 | examples to demonstrate usage with Memray. 
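The examples below are driven through the Memray CLI, but the same kind of capture
file can also be produced programmatically with Memray's Python API. A minimal
sketch (the workload and the output path are illustrative, not part of these
example projects):

.. code:: python

    import memray


    def allocate_a_lot():
        # Illustrative workload: build a large list of buffers so the
        # allocations stand out clearly in the resulting reports.
        return [bytes(1024) for _ in range(10_000)]


    # Every allocation made inside the ``with`` block is written to the capture
    # file, which can then be passed to ``memray flamegraph`` just like a file
    # produced by ``memray run``.
    with memray.Tracker("memray-programmatic-example.bin"):
        allocate_a_lot()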
8 | 9 | Make sure you install the required dependencies by running 10 | ``python3.9 -m pip install -r requirements.txt`` in the respective 11 | directory. The examples below use the project in the ``mandelbrot`` folder, but 12 | you can use the same instructions to launch the other examples as well. 13 | 14 | To track memory allocations, invoke ``memray3.9 run``: 15 | 16 | .. code:: shell 17 | 18 | memray3.9 run mandelbrot/mandelbrot.py 19 | 20 | Memray will print a message displaying the output file it creates. 21 | 22 | .. code:: text 23 | 24 | Writing profile results into mandelbrot/memray-mandelbrot.py.187967.bin 25 | 26 | You can use this file to create various reports. To generate a flame 27 | graph, use the following command: 28 | 29 | .. code:: shell 30 | 31 | memray3.9 flamegraph mandelbrot/memray-mandelbrot.py.187967.bin 32 | 33 | The HTML file for the flame graph will be generated under 34 | ``mandelbrot/memray-flamegraph-mandelbrot.py.187967.html``. The flame graph 35 | displays the stack frames in the application where allocations have occurred 36 | and shows the amount of memory used in each frame. 37 | 38 | You can see sample outputs of the resulting flame graphs: 39 | 40 | - `Mandelbrot <../_static/flamegraphs/memray-flamegraph-mandelbrot.html>`_ 41 | - `Nbody <../_static/flamegraphs/memray-flamegraph-nbody.html>`_ 42 | - `SQLite <../_static/flamegraphs/memray-flamegraph-sqlite.html>`_ 43 | - `Fibonacci <../_static/flamegraphs/memray-flamegraph-fib.html>`_ 44 | -------------------------------------------------------------------------------- /docs/examples/fibonacci/fib.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | 4 | def fib1(n): 5 | my_list = [0, 1] 6 | for i in range(2, n + 1): 7 | my_list.append(my_list[i - 1] + my_list[i - 2]) 8 | return my_list[-1] 9 | 10 | 11 | def fib2(n, cache={0: 0, 1: 1}): 12 | if n in cache: 13 | return cache[n] 14 | cache[n] = fib2(n - 1) + fib2(n - 2) 15 | return cache[n] 16 | 17 | 18 | def run(): 19 | sys.setrecursionlimit(100000) 20 | n = 99900 21 | a = fib1(n) 22 | b = fib2(n) 23 | 24 | assert a == b 25 | 26 | 27 | run() 28 | -------------------------------------------------------------------------------- /docs/examples/mandelbrot/mandelbrot-threaded.py: -------------------------------------------------------------------------------- 1 | from threading import Thread 2 | 3 | import numpy as np 4 | 5 | 6 | def mandelbrot(height, width, x=-0.5, y=0, zoom=1, max_iterations=100): 7 | # To make navigation easier we calculate these values 8 | x_width = 1.5 9 | y_height = 1.5 * height / width 10 | x_from = x - x_width / zoom 11 | x_to = x + x_width / zoom 12 | y_from = y - y_height / zoom 13 | y_to = y + y_height / zoom 14 | # Here the actual algorithm starts 15 | x = np.linspace(x_from, x_to, width).reshape((1, width)) 16 | y = np.linspace(y_from, y_to, height).reshape((height, 1)) 17 | c = x + 1j * y 18 | # Initialize z to all zero 19 | z = np.zeros(c.shape, dtype=np.complex128) 20 | # To keep track in which iteration the point diverged 21 | div_time = np.zeros(z.shape, dtype=int) 22 | # To keep track on which points did not converge so far 23 | m = np.full(c.shape, True, dtype=bool) 24 | for i in range(max_iterations): 25 | z[m] = z[m] ** 2 + c[m] 26 | diverged = np.greater( 27 | np.abs(z), 2, out=np.full(c.shape, False), where=m 28 | ) # Find diverging 29 | div_time[diverged] = i # set the value of the diverged iteration number 30 | m[np.abs(z) > 2] = False # to remember which have 
diverged 31 | return div_time 32 | 33 | 34 | if __name__ == "__main__": 35 | t1 = Thread(target=mandelbrot, args=(800, 1000)) 36 | t1.start() 37 | t2 = Thread(target=mandelbrot, args=(800, 1000)) 38 | t2.start() 39 | t3 = Thread(target=mandelbrot, args=(800, 1000)) 40 | t3.start() 41 | t3.join() 42 | -------------------------------------------------------------------------------- /docs/examples/mandelbrot/mandelbrot.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def mandelbrot(height, width, x=-0.5, y=0, zoom=1, max_iterations=100): 5 | # To make navigation easier we calculate these values 6 | x_width = 1.5 7 | y_height = 1.5 * height / width 8 | x_from = x - x_width / zoom 9 | x_to = x + x_width / zoom 10 | y_from = y - y_height / zoom 11 | y_to = y + y_height / zoom 12 | # Here the actual algorithm starts 13 | x = np.linspace(x_from, x_to, width).reshape((1, width)) 14 | y = np.linspace(y_from, y_to, height).reshape((height, 1)) 15 | c = x + 1j * y 16 | # Initialize z to all zero 17 | z = np.zeros(c.shape, dtype=np.complex128) 18 | # To keep track in which iteration the point diverged 19 | div_time = np.zeros(z.shape, dtype=int) 20 | # To keep track on which points did not converge so far 21 | m = np.full(c.shape, True, dtype=bool) 22 | for i in range(max_iterations): 23 | z[m] = z[m] ** 2 + c[m] 24 | diverged = np.greater( 25 | np.abs(z), 2, out=np.full(c.shape, False), where=m 26 | ) # Find diverging 27 | div_time[diverged] = i # set the value of the diverged iteration number 28 | m[np.abs(z) > 2] = False # to remember which have diverged 29 | return div_time 30 | 31 | 32 | if __name__ == "__main__": 33 | mandelbrot(800, 1000) 34 | -------------------------------------------------------------------------------- /docs/examples/mandelbrot/requirements.txt: -------------------------------------------------------------------------------- 1 | numpy 2 | -------------------------------------------------------------------------------- /docs/examples/nbody/requirements.txt: -------------------------------------------------------------------------------- 1 | numpy 2 | rebound 3 | -------------------------------------------------------------------------------- /docs/examples/sqlite/example.py: -------------------------------------------------------------------------------- 1 | import random 2 | import sqlite3 3 | import string 4 | import time 5 | 6 | create_statement = """ 7 | CREATE TABLE IF NOT EXISTS database_threading_test 8 | ( 9 | symbol TEXT, 10 | ts INTEGER, 11 | o REAL, 12 | h REAL, 13 | l REAL, 14 | c REAL, 15 | vf REAL, 16 | vt REAL, 17 | PRIMARY KEY(symbol, ts) 18 | ) 19 | """ 20 | insert_statement = "INSERT INTO database_threading_test VALUES(?,?,?,?,?,?,?,?)" 21 | select_statement = "SELECT * from database_threading_test" 22 | 23 | 24 | def generate_values(count=100): 25 | end = int(time.time()) - int(time.time()) % 900 26 | symbol = "".join( 27 | random.choice(string.ascii_uppercase + string.digits) for _ in range(10) 28 | ) 29 | ts = list(range(end - count * 900, end, 900)) 30 | for i in range(count): 31 | yield ( 32 | symbol, 33 | ts[i], 34 | random.random() * 1000, 35 | random.random() * 1000, 36 | random.random() * 1000, 37 | random.random() * 1000, 38 | random.random() * 1e9, 39 | random.random() * 1e5, 40 | ) 41 | 42 | 43 | def generate_values_list(symbols=1000, count=100): 44 | values = [] 45 | for _ in range(symbols): 46 | values.extend(generate_values(count)) 47 | return values 48 | 49 | 50 | def 
main(): 51 | lst = generate_values_list() 52 | conn = sqlite3.connect(":memory:") 53 | with conn: 54 | conn.execute(create_statement) 55 | conn.executemany(insert_statement, lst) 56 | results = conn.execute(select_statement).fetchall() 57 | print(f"There are {len(results)} items in teh db") 58 | 59 | 60 | if __name__ == "__main__": 61 | main() 62 | -------------------------------------------------------------------------------- /docs/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/favicon.ico -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. toctree:: 2 | :hidden: 3 | 4 | overview 5 | getting_started 6 | 7 | .. toctree:: 8 | :hidden: 9 | :caption: Hands-on Tutorial 10 | 11 | tutorials/index 12 | tutorials/1 13 | tutorials/2 14 | tutorials/3 15 | tutorials/additional_features 16 | 17 | .. toctree:: 18 | :hidden: 19 | :caption: Concepts 20 | 21 | run 22 | python_allocators 23 | memory 24 | temporary_allocations 25 | attach 26 | native_mode 27 | examples/README 28 | api 29 | jupyter_magic 30 | performance 31 | 32 | .. toctree:: 33 | :hidden: 34 | :caption: Reporters 35 | 36 | live 37 | summary 38 | flamegraph 39 | table 40 | tree 41 | stats 42 | transform 43 | 44 | .. toctree:: 45 | :hidden: 46 | :caption: Project Information 47 | 48 | supported_environments 49 | changelog 50 | licenses 51 | -------------------------------------------------------------------------------- /docs/jupyter_magic.rst: -------------------------------------------------------------------------------- 1 | .. _Jupyter integration: 2 | 3 | Jupyter Integration 4 | =================== 5 | 6 | We provide an IPython extension that adds a new Jupyter cell magic. This lets 7 | you create Memray flame graphs directly in Jupyter notebooks. 8 | 9 | memray_flamegraph 10 | ----------------- 11 | 12 | To load our IPython plugin, you simply need to run:: 13 | 14 | %load_ext memray 15 | 16 | Once it's loaded, you'll have access to the ``%%memray_flamegraph`` cell magic. 17 | You can fill a Jupyter cell with ``%%memray_flamegraph`` on its own line, 18 | followed by some code whose memory usage you want to profile. Memray will run 19 | that cell's code, tracking its memory allocations, and then display a flame 20 | graph directly in Jupyter for you to analyze. 21 | 22 | It's also possible to provide arguments on the ``%%memray_flamegraph`` line. 23 | For instance, ``%%memray_flamegraph --trace-python-allocators --leaks`` would 24 | let you look for memory not freed by the code in the cell:: 25 | 26 | %%memray_flamegraph --trace-python-allocators --leaks 27 | def a(): 28 | return "a" * 10_000 29 | 30 | def bc(): 31 | return "bc" * 10_000 32 | 33 | x = a() + bc() 34 | 35 | Arguments 36 | --------- 37 | 38 | .. argparse:: 39 | :ref: memray._ipython.flamegraph.argument_parser 40 | :prog: %%memray_flamegraph 41 | -------------------------------------------------------------------------------- /docs/overview.rst: -------------------------------------------------------------------------------- 1 | Overview 2 | -------- 3 | 4 | .. image:: _static/images/logo.png 5 | 6 | .. image:: _static/images/output.png 7 | 8 | Memray is a memory profiler for Python. It can track memory allocations in Python code, in native extension 9 | modules, and in the Python interpreter itself. 
It can generate several different types of reports to help you 10 | analyze the captured memory usage data. While commonly used as a CLI tool, it can also be used as a library to 11 | perform more fine-grained profiling tasks. 12 | 13 | 14 | Notable features: 15 | 16 | - 🕵️‍♀️ Traces every function call so it can accurately represent the call stack, unlike sampling profilers. 17 | - ℭ Also handles native calls in C/C++ libraries so the entire call stack is present in the results. 18 | - 🏎 Blazing fast! Profiling causes minimal slowdown in the application. Tracking native code is somewhat slower, but this can be enabled or disabled on demand. 19 | - 📈 It can generate various reports about the collected memory usage data, like flame graphs. 20 | - 🧵 Works with Python threads. 21 | - 👽🧵 Works with native-threads (e.g. C++ threads in native extensions) 22 | 23 | Memray can help with the following problems: 24 | 25 | - Analyze allocations in applications to help discover the cause of high memory usage. 26 | - Find memory leaks. 27 | - Find hotspots in code which cause a lot of allocations. 28 | 29 | 30 | .. image:: _static/images/quotes.png 31 | :align: center 32 | 33 | .. note:: 34 | Memray only works on Linux and MacOS, and cannot be installed on other platforms. 35 | -------------------------------------------------------------------------------- /docs/stats.rst: -------------------------------------------------------------------------------- 1 | Stats Reporter 2 | ============== 3 | 4 | The stats reporter generates high level statistics about the tracked process's 5 | memory allocations. 6 | 7 | .. image:: _static/images/stats_example.png 8 | 9 | The output includes the following: 10 | 11 | * Total number of allocations performed 12 | 13 | * Total amount of memory allocated 14 | 15 | * Histogram displaying the distribution of allocation sizes. The y-axis data (size) is logarithmic. 16 | 17 | * Distribution of allocation types (e.g. *MALLOC*, *CALLOC*, *MMAP*, etc.) 18 | 19 | * Stack trace and **size** of the top 'n' largest allocating locations by size (*default: 5*, configurable with the ``-n`` command line param) 20 | 21 | * Stack trace and **count** of the top 'n' largest allocating locations by number of allocations (*default: 5*, configurable with the ``-n`` command line param) 22 | 23 | * (for JSON output only) Metadata about the tracked process 24 | 25 | Basic Usage 26 | ----------- 27 | 28 | The general form of the ``stats`` subcommand is: 29 | 30 | .. code:: shell 31 | 32 | memray stats [options] 33 | 34 | The only argument the ``stats`` subcommand requires is the capture file 35 | previously generated using :doc:`the run subcommand `. 36 | 37 | The output will be printed directly to the standard output of the terminal. 38 | 39 | JSON Output 40 | ----------- 41 | 42 | If you supply the ``--json`` flag, the ``stats`` subcommand will write its 43 | output to a JSON file, rather than to the terminal. Like other commands that 44 | output to files, the default output file name is based on the name of your 45 | capture file, but it can be overridden with the ``-o`` / ``--output`` option. 46 | By default Memray will refuse to overwrite an existing file, but you can force 47 | it to by supplying the ``-f`` / ``--force`` option. 48 | 49 | Note that new fields may be added to the JSON output over time, though we'll 50 | try to avoid removing existing fields. 51 | 52 | CLI Reference 53 | ------------- 54 | 55 | .. 
argparse:: 56 | :ref: memray.commands.get_argument_parser 57 | :path: stats 58 | :prog: memray 59 | -------------------------------------------------------------------------------- /docs/summary.rst: -------------------------------------------------------------------------------- 1 | Summary Reporter 2 | ================ 3 | 4 | The summary reporter provides a quick overview of allocated memory at the time 5 | when the tracked process's memory usage was at its peak. It displays a table 6 | showing how much of that peak memory each function directly allocated ("own 7 | memory"), how much was cumulatively allocated by that function and everything 8 | it called ("total memory"), and the cumulative count of not yet freed 9 | allocations performed by that function and everything it called ("allocation 10 | count"). Allocations for different threads are aggregated together and shown in 11 | the same table. 12 | 13 | .. image:: _static/images/summary_example.png 14 | 15 | Basic Usage 16 | ----------- 17 | 18 | The general form of the ``summary`` subcommand is: 19 | 20 | .. code:: shell 21 | 22 | memray summary [options] 23 | 24 | The only argument the ``summary`` subcommand requires is the capture file 25 | previously generated using :doc:`the run subcommand `. 26 | 27 | The output will be printed directly to standard output. If standard output is 28 | a terminal, the output will be colorized. 29 | 30 | CLI Reference 31 | ------------- 32 | 33 | .. argparse:: 34 | :ref: memray.commands.get_argument_parser 35 | :path: summary 36 | :prog: memray 37 | -------------------------------------------------------------------------------- /docs/table.rst: -------------------------------------------------------------------------------- 1 | Table Reporter 2 | ============== 3 | 4 | The table reporter generates an HTML document showing a simple tabular view of 5 | the allocations contributing to the tracked process's peak memory usage. Each 6 | source line that contributed to that peak memory usage is given a row in the 7 | generated table, showing the amount of memory it allocated, the type of 8 | allocator it used, and the number of allocations it was responsible for. 9 | 10 | .. image:: _static/images/table_example.png 11 | 12 | 13 | The table can be sorted by each column and searched in the search field. The columns show the following data: 14 | 15 | - Thread ID: thread where the allocation happened 16 | - Size: total amount of memory used by all of these allocations 17 | - Allocator: allocator or deallocator function which acquired the memory 18 | - Allocations: total number of allocations performed by this entry 19 | - Location: function name, file and line of the allocation or "???" if unknown 20 | 21 | Basic Usage 22 | ----------- 23 | 24 | The general form of the ``table`` subcommand is: 25 | 26 | .. code:: shell 27 | 28 | memray table [options] 29 | 30 | The only argument the ``table`` subcommand requires is the capture file 31 | previously generated using :doc:`the run subcommand `. 32 | 33 | 34 | The output file will be named as ``memray-table-.html`` unless the ``-o`` argument was 35 | specified to override the default name. 36 | 37 | 38 | CLI Reference 39 | ------------- 40 | 41 | .. 
argparse:: 42 | :ref: memray.commands.get_argument_parser 43 | :path: table 44 | :prog: memray 45 | -------------------------------------------------------------------------------- /docs/tree.rst: -------------------------------------------------------------------------------- 1 | Tree Reporter 2 | ============== 3 | 4 | The tree reporter provides a simplified representation of the call hierarchy of 5 | the tracked process at the time when its memory usage was at its peak. 6 | 7 | .. image:: _static/images/tree_example.png 8 | 9 | The tree reporter shows an interactive terminal application displaying a tree 10 | representation of the allocated memory. Several aspects are important when 11 | interpreting the tree representation: 12 | 13 | * Only the 10 source locations responsible for the most allocated bytes are 14 | displayed. This is configurable with the ``--biggest-allocs`` command line 15 | parameter. 16 | * The total memory and percentage shown in the root node of the tree are 17 | calculated based only on the allocations that are shown. Since any allocation 18 | not big enough to be shown will not be included there, the reported total 19 | memory of the root node is normally less than the process's peak memory size. 20 | * The "📂" icon represents a frame that is a **caller** of a function where an 21 | allocation happened while the "📄" icon represents a frame that allocated 22 | memory. 23 | * Frames are colored based on their reported memory usage percentage, from red 24 | (most bytes allocated) to green (fewest). 25 | * You can interact with the application using the following keys: 26 | 27 | * You can navigate the tree using the arrow keys. Pressing the up arrow key 28 | will move up one level in the tree, while pressing the down arrow key will 29 | move down one row. When a new row is selected, the panel on the right 30 | will be updated to show the source code of the selected frame and some metadata 31 | about the allocations made by that frame and its children. 32 | * Pressing the 'e' key will expand nodes and their children recursively until a node with 33 | more than one child is found. This can be used to quickly expand the tree. 34 | * Pressing the 'i' key will hide all nodes that belong to the import system and their 35 | children. 36 | * Pressing the 'u' key will show all nodes that are marked as "uninteresting". 37 | 38 | .. note:: 39 | If the ``textual[syntax]`` package is installed, the Python source files 40 | will be displayed with syntax highlighting! 41 | 42 | 43 | Basic Usage 44 | ----------- 45 | 46 | The general form of the ``tree`` subcommand is: 47 | 48 | .. code:: shell 49 | 50 | memray tree [options] 51 | 52 | The only argument the ``tree`` subcommand requires is the capture file 53 | previously generated using :doc:`the run subcommand `. 54 | 55 | The output will be printed directly to standard output. If standard output is 56 | a terminal, the output will be colorized. 57 | 58 | CLI Reference 59 | ------------- 60 | 61 | ..
argparse:: 62 | :ref: memray.commands.get_argument_parser 63 | :path: tree 64 | :prog: memray 65 | -------------------------------------------------------------------------------- /docs/tutorials/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM debian:bookworm-slim 2 | 3 | ARG DEBIAN_FRONTEND=noninteractive 4 | 5 | RUN apt-get update \ 6 | && apt-get install -y --force-yes --no-install-recommends \ 7 | python3-dev \ 8 | python3-dbg \ 9 | python3-pip \ 10 | python3-venv \ 11 | && apt-get clean \ 12 | && rm -rf /var/lib/apt/lists/* 13 | 14 | ENV VIRTUAL_ENV=/venv \ 15 | PYTHONDONTWRITEBYTECODE=1 \ 16 | PATH=/bin:$PATH 17 | 18 | RUN python3 -m venv "$VIRTUAL_ENV" 19 | 20 | ENV PATH="${VIRTUAL_ENV}/bin:/usr/lib/ccache:${PATH}" \ 21 | PYTHON="${VIRTUAL_ENV}/bin/python" 22 | 23 | COPY requirements-tutorial.txt /tmp/ 24 | 25 | RUN $PYTHON -m pip install -U \ 26 | -r /tmp/requirements-tutorial.txt 27 | 28 | WORKDIR /src 29 | -------------------------------------------------------------------------------- /docs/tutorials/additional_features.rst: -------------------------------------------------------------------------------- 1 | What to learn about next 2 | ======================== 3 | 4 | We've now acquainted ourselves with Memray, and had a look at how it can be used in development 5 | workflows and for diagnosing unexpected memory issues. This section will briefly introduce you to 6 | a few more features offered by Memray which you can explore further in your own time. 7 | 8 | Essential Concepts 9 | ------------------ 10 | 11 | Check out the more detailed descriptions of the most essential concepts used in Memray by 12 | exploring the :doc:`concepts <../run>` section on the sidebar. It goes into detail about the 13 | ``memray`` subcommands and features available, as well as memory management in Python. 14 | 15 | pytest Plugin 16 | ------------- 17 | 18 | Memray offers a helpful pytest plugin, ``pytest-memray``, which has a couple of notable 19 | features: 20 | 21 | - ``@pytest.mark.limit_memory()`` marks tests as failed if the execution of said test allocates more 22 | memory than allowed. We used these markers throughout the unit tests in the three tutorial 23 | exercises. It will also print a helpful overview of which function calls used up the most memory 24 | for the failed test cases. 25 | - Running your tests as ``pytest --memray`` will generate a report with a high level overview of the 26 | memory allocated and will list a few top memory using functions. 27 | 28 | Try to utilize the plugin in your unit tests, and have them run as a part of your CI/CD pipeline. 29 | 30 | Read more about the memray pytest plugin in the `official documentation 31 | `_. 32 | 33 | Reporters 34 | --------- 35 | 36 | As a part of this study guide, we've worked with flame graphs. 
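For reference, a test using the plugin's ``limit_memory`` marker looks roughly
like the sketch below; the imported module and function are hypothetical
placeholders, and the tutorial's own tests under ``docs/tutorials/tests`` are
the authoritative examples.

.. code:: python

    import pytest

    from my_package import build_report  # hypothetical function under test


    @pytest.mark.limit_memory("24 MB")
    def test_build_report_stays_within_budget():
        # With pytest-memray installed and the suite run as ``pytest --memray``,
        # this test fails if the call allocates more than the stated budget.
        build_report()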
However, Memray offers numerous other 37 | types of reports for you to explore: 38 | 39 | - :doc:`Live Graphs <../live>` 40 | - :doc:`Summary Reporter <../summary>` 41 | - :doc:`Flame Graph Reporter <../flamegraph>` 42 | - :doc:`Table Reporter <../table>` 43 | - :doc:`Tree Reporter <../tree>` 44 | - :doc:`Stats Reporter <../stats>` 45 | - :doc:`Transform Reporter <../transform>` 46 | -------------------------------------------------------------------------------- /docs/tutorials/exercise_1/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/tutorials/exercise_1/__init__.py -------------------------------------------------------------------------------- /docs/tutorials/exercise_1/fibonacci.py: -------------------------------------------------------------------------------- 1 | import operator 2 | from functools import reduce 3 | from itertools import chain 4 | 5 | 6 | def fibonacci(length): 7 | # edge cases 8 | if length < 1: 9 | return [] 10 | if length == 1: 11 | return [1] 12 | if length == 2: 13 | return [1, 1] 14 | 15 | output = [1, 1] 16 | 17 | for i in range(length - 2): 18 | output.append(output[i] + output[i + 1]) 19 | 20 | return output 21 | 22 | 23 | def generate_fibonacci_hash(length_1, length_2, length_3): 24 | # We could have used sum(...) here instead of reduce(operator.add, ...), 25 | # but we choose to use reduce since it yields a more descriptive example 26 | # of the generated flamegraph for this specific example 27 | return ( 28 | reduce( 29 | operator.add, 30 | chain(fibonacci(length_1), fibonacci(length_2), fibonacci(length_3)), 31 | 0, 32 | ) 33 | % 10000 34 | ) 35 | 36 | 37 | if __name__ == "__main__": 38 | # DO NOT CHANGE 39 | LENGTH_OF_SEQUENCE_1 = 33333 40 | LENGTH_OF_SEQUENCE_2 = 30000 41 | LENGTH_OF_SEQUENCE_3 = 34567 42 | # DO NOT CHANGE 43 | generate_fibonacci_hash( 44 | LENGTH_OF_SEQUENCE_1, LENGTH_OF_SEQUENCE_2, LENGTH_OF_SEQUENCE_3 45 | ) 46 | -------------------------------------------------------------------------------- /docs/tutorials/exercise_2/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/tutorials/exercise_2/__init__.py -------------------------------------------------------------------------------- /docs/tutorials/exercise_2/holding_onto_memory.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | # DO NOT CHANGE 4 | MB_CONVERSION = 1024 * 1024 5 | DUPLICATE_CONST = 2 6 | 7 | SIZE_OF_DATA_IN_MB = 100 8 | SUBTRACT_AMOUNT = 3 9 | POWER_AMOUNT = 2 10 | ADD_AMOUNT = 10 11 | # DO NOT CHANGE 12 | 13 | 14 | def load_xMb_of_data(mb: int) -> np.ndarray: 15 | size = MB_CONVERSION * mb # DO NOT CHANGE 16 | return np.ones(size, dtype=np.uint8) 17 | 18 | 19 | def duplicate_data(data: np.ndarray) -> np.ndarray: 20 | return data * DUPLICATE_CONST 21 | 22 | 23 | def add_scalar(data: np.ndarray, scalar: int) -> np.ndarray: 24 | return data + scalar 25 | 26 | 27 | def subtract_scalar(data: np.ndarray, scalar: int) -> np.ndarray: 28 | return data - scalar 29 | 30 | 31 | def raise_to_power(data: np.ndarray, power: int) -> np.ndarray: 32 | return np.power(data, power) 33 | 34 | 35 | def process_data() -> np.ndarray: 36 | data = load_xMb_of_data(SIZE_OF_DATA_IN_MB) 37 | data = subtract_scalar(data, SUBTRACT_AMOUNT) 38 | data_pow = 
raise_to_power(data, POWER_AMOUNT) 39 | return add_scalar(duplicate_data(data_pow), ADD_AMOUNT) 40 | 41 | 42 | if __name__ == "__main__": 43 | process_data() 44 | -------------------------------------------------------------------------------- /docs/tutorials/exercise_3/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/tutorials/exercise_3/__init__.py -------------------------------------------------------------------------------- /docs/tutorials/exercise_3/lru_cache.py: -------------------------------------------------------------------------------- 1 | # pylint: disable=C0114 C0115 C0116 R0903 C0103 2 | 3 | import functools 4 | from collections import Counter 5 | 6 | # DO NOT CHANGE 7 | FIRST_COUNTER_RANGE = 500 8 | SECOND_COUNTER_RANGE = 1000 9 | # DO NOT CHANGE 10 | 11 | 12 | class Algorithms: 13 | def __init__(self, inc: int): 14 | self.inc = inc 15 | 16 | @functools.cache # pylint: disable=W1518 17 | def factorial_plus(self, n: int) -> int: 18 | return n * self.factorial_plus(n - 1) + self.inc if n > 1 else 1 + self.inc 19 | 20 | 21 | def generate_factorial_plus_last_digit(plus_range: int, factorial_range: int): 22 | for i in range(plus_range): 23 | A = Algorithms(i) 24 | for j in range(factorial_range): 25 | yield A.factorial_plus(j) % 10 26 | 27 | 28 | def compare_counts_different_factorials(): 29 | counts_500 = Counter( 30 | generate_factorial_plus_last_digit(FIRST_COUNTER_RANGE, FIRST_COUNTER_RANGE) 31 | ) 32 | counts_1000 = Counter( 33 | generate_factorial_plus_last_digit(SECOND_COUNTER_RANGE, SECOND_COUNTER_RANGE) 34 | ) 35 | 36 | print(counts_500.most_common()) 37 | print(counts_1000.most_common()) 38 | 39 | 40 | if __name__ == "__main__": 41 | compare_counts_different_factorials() 42 | -------------------------------------------------------------------------------- /docs/tutorials/index.rst: -------------------------------------------------------------------------------- 1 | About This Tutorial 2 | =================== 3 | 4 | The tutorials in this section of the docs provide a gentle introduction to debugging memory issues using the Memray 5 | profiler. You'll learn how to use basic features like generating flame graphs and integrating Memray with pytest test 6 | suites, as well as some more advanced features like inspecting the C, C++, or Rust frames on the call stack in addition 7 | to the Python frames. 8 | 9 | There are 3 different exercises within this tutorial: 10 | - :doc:`Exercise 1 <1>` - Fibonacci Sequence 11 | - :doc:`Exercise 2 <2>` - Clinging Onto memory 12 | - :doc:`Exercise 3 <3>` - LRU Cache 13 | 14 | You should follow them in the order they're presented here, as each builds on concepts introduced by the last. 
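The LRU Cache exercise above hinges on a subtle property of ``functools.cache``:
when it decorates a method at class level, a single cache is shared by the whole
class, and every ``self`` passed in is kept alive through the cache's keys. The
sketch below illustrates the difference between that pattern and a per-instance
cache; the class names and workload are illustrative, not taken from the
exercise itself.

.. code:: python

    import functools


    class ClassLevelCache:
        @functools.cache  # one cache, shared by every instance of the class
        def answer(self, n: int) -> int:
            return n * 2


    class PerInstanceCache:
        def __init__(self) -> None:
            # The cache lives on the instance, so it is freed together with it.
            self.answer = functools.cache(self._answer)

        def _answer(self, n: int) -> int:
            return n * 2


    shared = ClassLevelCache()
    shared.answer(1)
    # ``shared`` is now referenced from the keys of ``ClassLevelCache.answer``'s
    # cache, so it stays in memory even after the variable is deleted.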
15 | -------------------------------------------------------------------------------- /docs/tutorials/requirements-tutorial.txt: -------------------------------------------------------------------------------- 1 | memray 2 | numpy 3 | pylint 4 | pytest 5 | pytest-memray 6 | -------------------------------------------------------------------------------- /docs/tutorials/solutions/exercise_1/fibonacci.py: -------------------------------------------------------------------------------- 1 | import operator 2 | from functools import reduce 3 | from itertools import chain 4 | 5 | 6 | def fibonacci(length): 7 | # edge cases 8 | if length < 1: 9 | return 10 | if length == 1: 11 | yield 1 12 | return 13 | 14 | left = right = 1 15 | yield left 16 | yield right 17 | 18 | for _ in range(length - 2): 19 | left, right = right, left + right 20 | yield right 21 | 22 | 23 | def generate_fibonacci_hash(length_1, length_2, length_3): 24 | # We could have used sum(...) here instead of reduce(operator.add, ...), 25 | # but we choose to use reduce since it yields a more descriptive example 26 | # of the generated flamegraph for this specific example 27 | return ( 28 | reduce( 29 | operator.add, 30 | chain(fibonacci(length_1), fibonacci(length_2), fibonacci(length_3)), 31 | 0, 32 | ) 33 | % 10000 34 | ) 35 | 36 | 37 | if __name__ == "__main__": 38 | # DO NOT CHANGE 39 | LENGTH_OF_SEQUENCE_1 = 33333 40 | LENGTH_OF_SEQUENCE_2 = 30000 41 | LENGTH_OF_SEQUENCE_3 = 34567 42 | # DO NOT CHANGE 43 | generate_fibonacci_hash( 44 | LENGTH_OF_SEQUENCE_1, LENGTH_OF_SEQUENCE_2, LENGTH_OF_SEQUENCE_3 45 | ) 46 | -------------------------------------------------------------------------------- /docs/tutorials/solutions/exercise_2/holding_onto_memory.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | # DO NOT CHANGE 4 | MB_CONVERSION = 1024 * 1024 5 | DUPLICATE_CONST = 2 6 | 7 | SIZE_OF_DATA_IN_MB = 100 8 | SUBTRACT_AMOUNT = 3 9 | POWER_AMOUNT = 2 10 | ADD_AMOUNT = 10 11 | # DO NOT CHANGE 12 | 13 | 14 | def load_xMb_of_data(mb: int) -> np.ndarray: 15 | size = MB_CONVERSION * mb 16 | return np.ones(size, dtype=np.uint8) 17 | 18 | 19 | def duplicate_data(data: np.ndarray) -> np.ndarray: 20 | return data * DUPLICATE_CONST 21 | 22 | 23 | def add_scalar(data: np.ndarray, scalar: int) -> np.ndarray: 24 | return data + scalar 25 | 26 | 27 | def subtract_scalar(data: np.ndarray, scalar: int) -> np.ndarray: 28 | return data - scalar 29 | 30 | 31 | def raise_to_power(data: np.ndarray, power: int) -> np.ndarray: 32 | return np.power(data, power) 33 | 34 | 35 | def process_data_fix_1(): 36 | # no extra reference to the original array 37 | return add_scalar( 38 | duplicate_data( 39 | raise_to_power( 40 | subtract_scalar(load_xMb_of_data(SIZE_OF_DATA_IN_MB), SUBTRACT_AMOUNT), 41 | POWER_AMOUNT, 42 | ) 43 | ), 44 | ADD_AMOUNT, 45 | ) 46 | 47 | 48 | def process_data_fix_2(): 49 | # reusing the local variable instead of allocating more space 50 | # this approach is called 'hidden mutability' 51 | data = load_xMb_of_data(SIZE_OF_DATA_IN_MB) 52 | data = subtract_scalar(data, SUBTRACT_AMOUNT) 53 | data = raise_to_power(data, POWER_AMOUNT) 54 | data = duplicate_data(data) 55 | data = add_scalar(data, ADD_AMOUNT) 56 | return data 57 | 58 | 59 | # Use these to select which solution to test 60 | process_data = process_data_fix_1 61 | 62 | if __name__ == "__main__": 63 | process_data() 64 | -------------------------------------------------------------------------------- 
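Both fixes above avoid keeping the original 100 MB array reachable once the
pipeline has moved past it. As a quick cross-check outside of Memray, the
standard library's ``tracemalloc`` can compare the peak traced allocations of
the two variants; a rough sketch follows (the import path is hypothetical, and
the exact numbers depend on the NumPy version and platform).

.. code:: python

    import tracemalloc

    # Hypothetical import: point this at whichever copy of the solution module
    # is importable in your environment.
    from holding_onto_memory import process_data_fix_1, process_data_fix_2


    def peak_mib(func) -> float:
        """Run ``func`` and return the peak traced allocation in MiB."""
        tracemalloc.start()
        func()
        _, peak = tracemalloc.get_traced_memory()
        tracemalloc.stop()
        return peak / (1024 * 1024)


    if __name__ == "__main__":
        print("fix 1 peak:", peak_mib(process_data_fix_1), "MiB")
        print("fix 2 peak:", peak_mib(process_data_fix_2), "MiB")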
/docs/tutorials/solutions/exercise_3/lru_cache.py: -------------------------------------------------------------------------------- 1 | import functools 2 | from collections import Counter 3 | 4 | # DO NOT CHANGE 5 | FIRST_COUNTER_RANGE = 500 6 | SECOND_COUNTER_RANGE = 1000 7 | # DO NOT CHANGE 8 | 9 | 10 | class Algorithms: 11 | def __init__(self, inc: int): 12 | self.inc = inc 13 | self.factorial_plus = functools.cache(self._uncached_factorial_plus) 14 | 15 | def _uncached_factorial_plus(self, n: int) -> int: 16 | return n * self.factorial_plus(n - 1) + self.inc if n > 1 else 1 + self.inc 17 | 18 | 19 | def generate_factorial_plus_last_digit(plus_range: int, factorial_range: int): 20 | for i in range(plus_range): 21 | A = Algorithms(i) 22 | for j in range(factorial_range): 23 | yield A.factorial_plus(j) % 10 24 | 25 | 26 | def compare_counts_different_factorials(): 27 | counts_500 = Counter( 28 | generate_factorial_plus_last_digit(FIRST_COUNTER_RANGE, FIRST_COUNTER_RANGE) 29 | ) 30 | counts_1000 = Counter( 31 | generate_factorial_plus_last_digit(SECOND_COUNTER_RANGE, SECOND_COUNTER_RANGE) 32 | ) 33 | print(counts_500.most_common()) 34 | print(counts_1000.most_common()) 35 | 36 | 37 | if __name__ == "__main__": 38 | compare_counts_different_factorials() 39 | -------------------------------------------------------------------------------- /docs/tutorials/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/docs/tutorials/tests/__init__.py -------------------------------------------------------------------------------- /docs/tutorials/tests/test_exercise_1.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from exercise_1.fibonacci import generate_fibonacci_hash 3 | 4 | # Memory tests 5 | 6 | 7 | @pytest.mark.limit_memory("100 KB") 8 | def test_fibonacci(): 9 | LENGTH_OF_SEQUENCE_1 = 33333 # pylint: disable=invalid-name 10 | LENGTH_OF_SEQUENCE_2 = 30000 # pylint: disable=invalid-name 11 | LENGTH_OF_SEQUENCE_3 = 34567 # pylint: disable=invalid-name 12 | 13 | generate_fibonacci_hash( 14 | LENGTH_OF_SEQUENCE_1, LENGTH_OF_SEQUENCE_2, LENGTH_OF_SEQUENCE_3 15 | ) 16 | 17 | 18 | # Correctness tests 19 | 20 | 21 | def test_fibonacci_empty(): 22 | h = generate_fibonacci_hash(0, 0, 0) 23 | assert h == 0 24 | 25 | 26 | @pytest.mark.parametrize( 27 | ("length", "expected"), 28 | [ 29 | (1, 1), 30 | (2, 2), # 1 + 1 31 | (6, 20), # 1 + 1 + 2 + 3 + 5 + 8 32 | ], 33 | ) 34 | def test_fibonacci_short(length, expected): 35 | h = generate_fibonacci_hash(0, 0, length) 36 | assert h == expected 37 | 38 | 39 | @pytest.mark.parametrize( 40 | ("length", "expected"), 41 | [ 42 | (1, 1), 43 | (2, 2), # 1 + 1 44 | (6, 20), # 1 + 1 + 2 + 3 + 5 + 8 45 | ], 46 | ) 47 | def test_fibonacci_multiple(length, expected): 48 | h = generate_fibonacci_hash(length, length, length) 49 | assert h == expected * 3 50 | 51 | 52 | def test_hash_modulo_10000(): 53 | # 1 + 1 + 2 +3 + 5 + 8 + 13 + 21 + 34 + 55 + 89 + 144 + 233 + 377 + 610 54 | # + 987 + 1597 + 2584 + 4181 == 10945 55 | h = generate_fibonacci_hash(0, 0, 19) 56 | assert h == 945 # 10945 % 10000 57 | -------------------------------------------------------------------------------- /docs/tutorials/tests/test_exercise_2.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | from exercise_2.holding_onto_memory 
import process_data 4 | 5 | 6 | @pytest.mark.limit_memory("230 MB") 7 | def test_holding_in_memory(): 8 | process_data() 9 | 10 | 11 | def test_result(): 12 | result = process_data() 13 | assert np.all(result == 18) 14 | -------------------------------------------------------------------------------- /docs/tutorials/tests/test_exercise_3.py: -------------------------------------------------------------------------------- 1 | import math 2 | 3 | import pytest 4 | from exercise_3.lru_cache import Algorithms 5 | from exercise_3.lru_cache import compare_counts_different_factorials 6 | from exercise_3.lru_cache import generate_factorial_plus_last_digit 7 | 8 | # Memory tests 9 | 10 | 11 | @pytest.mark.limit_memory("75 MB") 12 | def test_lru_cache(): 13 | compare_counts_different_factorials() 14 | 15 | 16 | # Correctness tests 17 | 18 | 19 | @pytest.mark.parametrize("value", [1, 4, 10]) 20 | def test_algorithms_0(value): 21 | a = Algorithms(0) # This is equivalent to the standard factorial function 22 | assert a.factorial_plus(value) == math.factorial(value) 23 | 24 | 25 | def test_algorithms_5(): 26 | a = Algorithms(5) 27 | 28 | # 3 * (2 * (1 + 5) + 5) + 5 29 | assert a.factorial_plus(3) == 56 30 | 31 | # 4 * (3 * (2 * (1 + 5) + 5) + 5) + 5 32 | assert a.factorial_plus(4) == 229 33 | 34 | 35 | def test_generate_factorial_plus_last_digit_0(): 36 | values = list(generate_factorial_plus_last_digit(1, 6)) 37 | assert values == [1, 1, 2, 6, 4, 0] # last digits of 1 1 2 6 24 120, i.e. 0! to 5! 38 | 39 | 40 | def test_generate_factorial_plus_0_last_digit(): 41 | values = list(generate_factorial_plus_last_digit(5, 1)) 42 | assert values == [ 43 | 1, 44 | 2, 45 | 3, 46 | 4, 47 | 5, 48 | ] # last digits of the first fac_plus_n factorial of 0, which is always n+1 49 | 50 | 51 | def test_generate_factorial_plus(): 52 | values = list(generate_factorial_plus_last_digit(3, 5)) 53 | expected = ( 54 | [1, 1, 2, 6, 4] 55 | + [2, 2, 5, 6, 5] # last digits of the first fac_plus_0 factorial of n, i.e. n! 56 | + [3, 3, 8, 6, 6] # fac_plus_1 values are [2, 2, 5, 16, 65] 57 | ) # fac_plus_1 values are [3, 3, 8, 26, 106] 58 | assert ( 59 | values == expected 60 | ) # last digits of the first fac_plus(n) factorial of 0, which is always n+1 61 | -------------------------------------------------------------------------------- /news/.gitignore: -------------------------------------------------------------------------------- 1 | !.gitignore 2 | -------------------------------------------------------------------------------- /news/742.removal.1.rst: -------------------------------------------------------------------------------- 1 | We no longer provide manylinux2010 wheels. Previously they were provided for Python 3.7 through 3.12, now we provide manylinux2014 wheels for those Python versions instead. The manylinux project dropped support for manylinux2010 on August 1st, 2022. 2 | -------------------------------------------------------------------------------- /news/742.removal.2.rst: -------------------------------------------------------------------------------- 1 | We no longer provide musllinux_1_1 wheels. Previously they were provided for Python 3.7 through 3.13, now we provide musllinux_1_2 wheels for those Python versions instead. The manylinux project dropped support for musllinux_1_1 on November 1st, 2024. 
2 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "memray", 3 | "private": true, 4 | "devDependencies": { 5 | "@babel/preset-env": "^7.14.7", 6 | "jest": "^27.0.1", 7 | "lodash": "^4.17.21", 8 | "webpack": "^5.94.0", 9 | "webpack-cli": "^4.7.0" 10 | }, 11 | "jest": { 12 | "modulePathIgnorePatterns": [ 13 | "/build/", 14 | "/node_modules/" 15 | ] 16 | }, 17 | "scripts": { 18 | "build": "webpack", 19 | "test": "jest" 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /requirements-docs.txt: -------------------------------------------------------------------------------- 1 | IPython 2 | sphinx 3 | sphinx-autobuild 4 | sphinx-argparse 5 | furo 6 | -------------------------------------------------------------------------------- /requirements-extra.txt: -------------------------------------------------------------------------------- 1 | mypy 2 | bump2version 3 | towncrier 4 | pre-commit 5 | -r requirements-docs.txt 6 | -------------------------------------------------------------------------------- /requirements-test.txt: -------------------------------------------------------------------------------- 1 | Cython 2 | coverage[toml] 3 | greenlet; python_version < '3.14' 4 | pytest 5 | pytest-cov 6 | ipython 7 | setuptools; python_version >= '3.12' 8 | pkgconfig 9 | pytest-textual-snapshot 10 | textual >= 0.43, != 0.65.2, != 0.66 11 | packaging 12 | -------------------------------------------------------------------------------- /src/memray/__init__.py: -------------------------------------------------------------------------------- 1 | from ._ipython import load_ipython_extension 2 | from ._memray import AllocationRecord 3 | from ._memray import AllocatorType 4 | from ._memray import Destination 5 | from ._memray import FileDestination 6 | from ._memray import FileFormat 7 | from ._memray import FileReader 8 | from ._memray import MemorySnapshot 9 | from ._memray import SocketDestination 10 | from ._memray import SocketReader 11 | from ._memray import Tracker 12 | from ._memray import dump_all_records 13 | from ._memray import set_log_level 14 | from ._memray import start_thread_trace 15 | from ._metadata import Metadata 16 | from ._version import __version__ 17 | 18 | __all__ = [ 19 | "AllocationRecord", 20 | "AllocatorType", 21 | "FileFormat", 22 | "MemorySnapshot", 23 | "dump_all_records", 24 | "start_thread_trace", 25 | "Tracker", 26 | "FileReader", 27 | "SocketReader", 28 | "Destination", 29 | "FileDestination", 30 | "SocketDestination", 31 | "Metadata", 32 | "__version__", 33 | "set_log_level", 34 | "load_ipython_extension", 35 | ] 36 | -------------------------------------------------------------------------------- /src/memray/__init__.pyi: -------------------------------------------------------------------------------- 1 | from memray._destination import Destination as Destination 2 | from memray._destination import FileDestination as FileDestination 3 | from memray._destination import SocketDestination as SocketDestination 4 | from memray._metadata import Metadata as Metadata 5 | 6 | from ._memray import AllocationRecord as AllocationRecord 7 | from ._memray import AllocatorType as AllocatorType 8 | from ._memray import FileFormat as FileFormat 9 | from ._memray import FileReader as FileReader 10 | from ._memray import MemorySnapshot as MemorySnapshot 11 | from ._memray import SocketReader as 
SocketReader 12 | from ._memray import Tracker as Tracker 13 | from ._memray import dump_all_records as dump_all_records 14 | -------------------------------------------------------------------------------- /src/memray/__main__.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | from memray.commands import main 4 | 5 | if __name__ == "__main__": 6 | sys.exit(main()) 7 | -------------------------------------------------------------------------------- /src/memray/_destination.py: -------------------------------------------------------------------------------- 1 | import pathlib 2 | import typing 3 | from dataclasses import dataclass 4 | 5 | 6 | @dataclass(frozen=True) 7 | class Destination: 8 | pass 9 | 10 | 11 | @dataclass(frozen=True) 12 | class FileDestination(Destination): 13 | """Specify an output file to write captured allocations into. 14 | 15 | Args: 16 | path: The path to the output file. 17 | overwrite: By default, if a file already exists at that path an 18 | exception will be raised. If you provide ``overwrite=True``, then 19 | the existing file will be overwritten instead. 20 | """ 21 | 22 | path: typing.Union[pathlib.Path, str] 23 | overwrite: bool = False 24 | compress_on_exit: bool = True 25 | 26 | 27 | @dataclass(frozen=True) 28 | class SocketDestination(Destination): 29 | """Specify a port to serve captured allocations on. 30 | 31 | When a ``SocketDestination`` is passed to the `Tracker` constructor, the 32 | process will immediately create a server socket on the given port and wait 33 | for a reader to connect (see :ref:`Live Tracking`). The `Tracker` 34 | constructor will not return until a client has connected. Any records the 35 | tracker goes on to capture will be written over the socket to the attached 36 | client. 37 | 38 | Args: 39 | server_port: The port to accept a client connection on. 40 | address: The address to bind the server socket to. This should 41 | generally be left alone, but you might want to use ``"0.0.0.0"`` to 42 | accept connections from clients on other machines. Note that 43 | sending records to clients on other machines is generally a bad 44 | idea, though. In particular, this won't play nicely with 45 | :ref:`Native Tracking`, because the client on the remote machine 46 | won't have access to the shared libraries used by the tracked 47 | process. 
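    Example (an illustrative sketch, not taken from the original docs; the
    port number and ``run_workload`` are placeholders)::

        import memray

        dest = memray.SocketDestination(server_port=5000)
        with memray.Tracker(destination=dest):
            # Blocks above until a reader attaches, e.g. ``memray live 5000``.
            run_workload()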
48 | """ 49 | 50 | server_port: int 51 | address: str = "127.0.0.1" 52 | -------------------------------------------------------------------------------- /src/memray/_errors.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | 4 | class MemrayError(Exception): 5 | """Exceptions raised in this package.""" 6 | 7 | 8 | class MemrayCommandError(MemrayError): 9 | """Exceptions raised from this package's CLI commands.""" 10 | 11 | def __init__(self, *args: Any, exit_code: int) -> None: 12 | super().__init__(*args) 13 | self.exit_code = exit_code 14 | -------------------------------------------------------------------------------- /src/memray/_ipython/__init__.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | 4 | def load_ipython_extension(ipython: Any) -> None: 5 | from .flamegraph import FlamegraphMagics 6 | 7 | ipython.register_magics(FlamegraphMagics) 8 | 9 | 10 | __all__ = ["load_ipython_extension"] 11 | -------------------------------------------------------------------------------- /src/memray/_memray/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.10) 2 | set(CMAKE_CXX_STANDARD 17) 3 | set(CMAKE_CXX_STANDARD_REQUIRED ON) 4 | project(_memray) 5 | 6 | find_package( 7 | Python 8 | COMPONENTS Interpreter Development 9 | REQUIRED) 10 | 11 | if(CMAKE_HOST_SYSTEM_NAME MATCHES "Darwin") 12 | set(MEMRAY_LINKER_FILE macho_shenanigans.cpp) 13 | else() 14 | set(MEMRAY_LINKER_FILE elf_shenanigans.cpp) 15 | endif() 16 | 17 | add_library( 18 | _memray STATIC 19 | ${MEMRAY_LINKER_FILE} 20 | inject.cpp 21 | compat.cpp 22 | hooks.cpp 23 | logging.cpp 24 | native_resolver.cpp 25 | python_helpers.cpp 26 | record_reader.cpp 27 | record_writer.cpp 28 | records.cpp 29 | sink.cpp 30 | snapshot.cpp 31 | socket_reader_thread.cpp 32 | source.cpp 33 | tracking_api.cpp) 34 | 35 | if(CMAKE_HOST_SYSTEM_NAME MATCHES "Darwin") 36 | execute_process( 37 | COMMAND brew --prefix lz4 38 | RESULT_VARIABLE BREW_LZ4 39 | OUTPUT_VARIABLE BREW_LZ4_PREFIX 40 | OUTPUT_STRIP_TRAILING_WHITESPACE) 41 | if(BREW_LZ4 EQUAL 0 AND EXISTS "${BREW_LZ4_PREFIX}") 42 | message(STATUS "Found Lz4 installed by Homebrew at ${BREW_LZ4_PREFIX}") 43 | include_directories("${BREW_LZ4_PREFIX}/include") 44 | link_directories("${BREW_LZ4_PREFIX}/lib") 45 | endif() 46 | else() 47 | find_package(PkgConfig REQUIRED) 48 | pkg_check_modules(BINARY_DEPS REQUIRED liblz4 libunwind) 49 | target_link_libraries(_memray ${BINARY_DEPS_STATIC_LIBRARIES}) 50 | target_include_directories(_memray PUBLIC ${BINARY_DEPS_INCLUDE_DIRS}) 51 | target_compile_options(_memray PUBLIC ${BINARY_DEPS_CFLAGS}) 52 | target_link_options(_memray PUBLIC ${BINARY_DEPS_LDFLAGS}) 53 | endif() 54 | 55 | include_directories(. 
../../vendor/libbacktrace/install/include 56 | ${Python_INCLUDE_DIRS}) 57 | link_directories(../../vendor/libbacktrace/install/lib) 58 | -------------------------------------------------------------------------------- /src/memray/_memray/__init__.pxd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/src/memray/_memray/__init__.pxd -------------------------------------------------------------------------------- /src/memray/_memray/algorithm.pxd: -------------------------------------------------------------------------------- 1 | cdef extern from "" namespace "std" nogil: 2 | ssize_t count[Iter, T](Iter first, Iter last, const T& value) 3 | -------------------------------------------------------------------------------- /src/memray/_memray/alloc.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #ifdef __linux__ 4 | # include 5 | #endif 6 | 7 | #include 8 | 9 | extern "C" { 10 | #ifndef __GLIBC__ 11 | static void* 12 | pvalloc [[maybe_unused]] (size_t size) 13 | { 14 | return NULL; 15 | } 16 | #endif 17 | 18 | #if !defined(_ISOC11_SOURCE) && defined(__GLIBC__) 19 | static void* 20 | aligned_alloc(size_t alignment, size_t size) 21 | { 22 | return NULL; 23 | } 24 | #endif 25 | 26 | #ifdef __APPLE__ 27 | 28 | static void* 29 | memalign [[maybe_unused]] (size_t alignment, size_t size) 30 | { 31 | return NULL; 32 | } 33 | #endif 34 | } 35 | -------------------------------------------------------------------------------- /src/memray/_memray/alloc.pxd: -------------------------------------------------------------------------------- 1 | cdef extern from "alloc.h" nogil: 2 | void *calloc (size_t count, size_t eltsize) 3 | void free (void *ptr) 4 | void *malloc (size_t size) 5 | int posix_memalign(void** memptr, size_t alignment, size_t size) 6 | void* aligned_alloc(size_t alignment, size_t size) 7 | void *realloc (void *ptr, size_t newsize) 8 | void* valloc(size_t size) 9 | void* memalign(size_t alignment, size_t size) 10 | void* pvalloc(size_t size) 11 | 12 | cdef extern from "Python.h": 13 | void* PyMem_RawMalloc(size_t n) nogil 14 | void* PyMem_RawCalloc(size_t nelem, size_t elsize) nogil 15 | void* PyMem_RawRealloc(void *p, size_t n) nogil 16 | void PyMem_RawFree(void *p) nogil 17 | 18 | void* PyMem_Malloc(size_t n) 19 | void* PyMem_Calloc(size_t nelem, size_t elsize) 20 | void* PyMem_Realloc(void *p, size_t n) 21 | void PyMem_Free(void *p) 22 | 23 | void* PyObject_Malloc(size_t size) 24 | void* PyObject_Calloc(size_t nelem, size_t elsize) 25 | void* PyObject_Realloc(void *ptr, size_t new_size) 26 | void PyObject_Free(void *ptr) 27 | -------------------------------------------------------------------------------- /src/memray/_memray/compat.cpp: -------------------------------------------------------------------------------- 1 | #include "compat.h" 2 | 3 | namespace memray::compat { 4 | 5 | void 6 | setprofileAllThreads(Py_tracefunc func, PyObject* arg) 7 | { 8 | assert(PyGILState_Check()); 9 | #if PY_VERSION_HEX >= 0x030D0000 10 | PyEval_SetProfileAllThreads(func, arg); 11 | #else 12 | PyThreadState* this_tstate = PyThreadState_Get(); 13 | PyInterpreterState* interp = threadStateGetInterpreter(this_tstate); 14 | for (PyThreadState* tstate = PyInterpreterState_ThreadHead(interp); tstate != nullptr; 15 | tstate = PyThreadState_Next(tstate)) 16 | { 17 | # if PY_VERSION_HEX >= 0x03090000 18 | if 
(_PyEval_SetProfile(tstate, func, arg) < 0) { 19 | _PyErr_WriteUnraisableMsg("in PyEval_SetProfileAllThreads", nullptr); 20 | } 21 | # else 22 | // For 3.7 and 3.8, backport _PyEval_SetProfile from 3.9 23 | // https://github.com/python/cpython/blob/v3.9.13/Python/ceval.c#L4738-L4767 24 | PyObject* profileobj = tstate->c_profileobj; 25 | 26 | tstate->c_profilefunc = NULL; 27 | tstate->c_profileobj = NULL; 28 | /* Must make sure that tracing is not ignored if 'profileobj' is freed */ 29 | tstate->use_tracing = tstate->c_tracefunc != NULL; 30 | Py_XDECREF(profileobj); 31 | 32 | Py_XINCREF(arg); 33 | tstate->c_profileobj = arg; 34 | tstate->c_profilefunc = func; 35 | 36 | /* Flag that tracing or profiling is turned on */ 37 | tstate->use_tracing = (func != NULL) || (tstate->c_tracefunc != NULL); 38 | # endif 39 | } 40 | #endif 41 | } 42 | 43 | } // namespace memray::compat 44 | -------------------------------------------------------------------------------- /src/memray/_memray/compat.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #define PY_SSIZE_T_CLEAN 4 | #include 5 | 6 | #include "frameobject.h" 7 | 8 | namespace memray::compat { 9 | 10 | inline int 11 | isPythonFinalizing() 12 | { 13 | #if PY_VERSION_HEX >= 0x030D0000 14 | return Py_IsFinalizing(); 15 | #else 16 | return _Py_IsFinalizing(); 17 | #endif 18 | } 19 | 20 | inline bool 21 | isEntryFrame(PyFrameObject* frame) 22 | { 23 | #if PY_VERSION_HEX >= 0x030B0000 24 | return _PyFrame_IsEntryFrame(frame); 25 | #else 26 | (void)frame; 27 | return true; 28 | #endif 29 | } 30 | 31 | inline PyFrameObject* 32 | threadStateGetFrame(PyThreadState* tstate) 33 | { 34 | #if PY_VERSION_HEX < 0x030B0000 35 | // Prior to Python 3.11 this was exposed. 36 | return tstate->frame; 37 | #else 38 | // Return a borrowed reference. 39 | PyFrameObject* ret = PyThreadState_GetFrame(tstate); 40 | if (ret) { 41 | assert(Py_REFCNT(ret) >= 2); 42 | Py_DECREF(ret); 43 | } 44 | return ret; 45 | #endif 46 | } 47 | 48 | inline PyCodeObject* 49 | frameGetCode(PyFrameObject* frame) 50 | { 51 | #if PY_VERSION_HEX < 0x030B0000 52 | // Prior to Python 3.11 this was exposed. 53 | return frame->f_code; 54 | #else 55 | // Return a borrowed reference. 56 | PyCodeObject* ret = PyFrame_GetCode(frame); 57 | assert(Py_REFCNT(ret) >= 2); 58 | Py_DECREF(ret); 59 | return ret; 60 | #endif 61 | } 62 | 63 | inline PyFrameObject* 64 | frameGetBack(PyFrameObject* frame) 65 | { 66 | #if PY_VERSION_HEX < 0x030B0000 67 | // Prior to Python 3.11 this was exposed. 68 | return frame->f_back; 69 | #else 70 | // Return a borrowed reference. 
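    // PyFrame_GetBack() returns a new reference on 3.11+, so the DECREF below
    // hands back a borrowed pointer that mirrors the old `f_back` field. The
    // assert records the assumption that the frame chain still owns another
    // reference, which keeps the returned pointer valid for the caller.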
71 | PyFrameObject* ret = PyFrame_GetBack(frame); 72 | if (ret) { 73 | assert(Py_REFCNT(ret) >= 2); 74 | Py_DECREF(ret); 75 | } 76 | return ret; 77 | #endif 78 | } 79 | 80 | inline PyInterpreterState* 81 | threadStateGetInterpreter(PyThreadState* tstate) 82 | { 83 | #if PY_VERSION_HEX < 0x03090000 84 | return tstate->interp; 85 | #else 86 | return PyThreadState_GetInterpreter(tstate); 87 | #endif 88 | } 89 | 90 | void 91 | setprofileAllThreads(Py_tracefunc func, PyObject* arg); 92 | 93 | } // namespace memray::compat 94 | -------------------------------------------------------------------------------- /src/memray/_memray/exceptions.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | 5 | namespace memray::exception { 6 | 7 | class MemrayException : public std::runtime_error 8 | { 9 | using std::runtime_error::runtime_error; 10 | }; 11 | 12 | class IoError : public MemrayException 13 | { 14 | using MemrayException::MemrayException; 15 | }; 16 | 17 | } // namespace memray::exception 18 | -------------------------------------------------------------------------------- /src/memray/_memray/frame_tree.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include 3 | #include 4 | 5 | #include "records.h" 6 | 7 | namespace memray::tracking_api { 8 | class FrameTree 9 | { 10 | public: 11 | using index_t = uint32_t; // TODO: Shouldn't this be size_t? 12 | 13 | inline index_t minIndex() const 14 | { 15 | return 1; 16 | } 17 | 18 | inline index_t maxIndex() const 19 | { 20 | return d_graph.size() - 1; 21 | } 22 | 23 | inline std::pair nextNode(index_t index) const 24 | { 25 | assert(1 <= index && index <= d_graph.size() - 1); 26 | return std::make_pair(d_graph[index].frame_id, d_graph[index].parent_index); 27 | } 28 | 29 | using tracecallback_t = std::function; 30 | 31 | template 32 | size_t getTraceIndex(const T& stack_trace, const tracecallback_t& callback) 33 | { 34 | index_t index = 0; 35 | for (const auto& frame : stack_trace) { 36 | index = getTraceIndexUnsafe(index, frame, callback); 37 | } 38 | return index; 39 | } 40 | 41 | size_t getTraceIndex(index_t parent_index, frame_id_t frame) 42 | { 43 | return getTraceIndexUnsafe(parent_index, frame, tracecallback_t()); 44 | } 45 | 46 | private: 47 | size_t getTraceIndexUnsafe(index_t parent_index, frame_id_t frame, const tracecallback_t& callback) 48 | { 49 | Node& parent = d_graph[parent_index]; 50 | auto it = std::lower_bound(parent.children.begin(), parent.children.end(), frame); 51 | if (it == parent.children.end() || it->frame_id != frame) { 52 | index_t new_index = d_graph.size(); 53 | it = parent.children.insert(it, {frame, new_index}); 54 | if (callback && !callback(frame, parent_index)) { 55 | return 0; 56 | } 57 | d_graph.push_back({frame, parent_index}); 58 | } 59 | return it->child_index; 60 | } 61 | 62 | struct DescendentEdge 63 | { 64 | frame_id_t frame_id; 65 | index_t child_index; 66 | 67 | bool operator<(frame_id_t the_frame_id) const 68 | { 69 | return this->frame_id < the_frame_id; 70 | } 71 | }; 72 | 73 | struct Node 74 | { 75 | frame_id_t frame_id; 76 | index_t parent_index; 77 | std::vector children; 78 | }; 79 | std::vector d_graph{{0, 0, {}}}; 80 | }; 81 | } // namespace memray::tracking_api 82 | -------------------------------------------------------------------------------- /src/memray/_memray/hooks.pxd: -------------------------------------------------------------------------------- 1 | from 
_memray.records cimport Allocation 2 | from libcpp cimport bool 3 | 4 | 5 | cdef extern from "hooks.h" namespace "memray::hooks": 6 | cdef cppclass Allocator: 7 | pass 8 | 9 | bool isDeallocator(const Allocator& allocator) 10 | -------------------------------------------------------------------------------- /src/memray/_memray/linker_shenanigans.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | #include 5 | 6 | #include 7 | 8 | namespace memray::linker { 9 | 10 | static void 11 | _dummy(void){}; 12 | 13 | class SymbolPatcher 14 | { 15 | private: 16 | std::set symbols; 17 | std::string self_so_name = "_memray.cpython-"; 18 | 19 | public: 20 | SymbolPatcher() 21 | { 22 | Dl_info info; 23 | if (dladdr((void*)&_dummy, &info)) { 24 | self_so_name = info.dli_fname; 25 | } 26 | } 27 | void overwrite_symbols() noexcept; 28 | void restore_symbols() noexcept; 29 | }; 30 | } // namespace memray::linker 31 | -------------------------------------------------------------------------------- /src/memray/_memray/logging.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | 4 | #include "logging.h" 5 | 6 | namespace memray { 7 | 8 | static int LOG_THRESHOLD = static_cast(logLevel::WARNING); 9 | 10 | static const char* 11 | prefixFromLogLevel(int level) 12 | { 13 | if (level >= CRITICAL) return "Memray CRITICAL: "; 14 | if (level >= ERROR) return "Memray ERROR: "; 15 | if (level >= WARNING) return "Memray WARNING: "; 16 | if (level >= INFO) return "Memray INFO: "; 17 | if (level >= DEBUG) return "Memray DEBUG: "; 18 | return "Memray TRACE: "; 19 | } 20 | 21 | void 22 | setLogThreshold(int threshold) 23 | { 24 | LOG_THRESHOLD = threshold; 25 | } 26 | 27 | logLevel 28 | getLogThreshold() 29 | { 30 | return static_cast(LOG_THRESHOLD); 31 | } 32 | 33 | void 34 | logToStderr(const std::string& message, int level) 35 | { 36 | if (level < LOG_THRESHOLD) { 37 | return; 38 | } 39 | 40 | std::cerr << prefixFromLogLevel(level) << message << std::endl; 41 | } 42 | 43 | } // namespace memray 44 | -------------------------------------------------------------------------------- /src/memray/_memray/logging.h: -------------------------------------------------------------------------------- 1 | #ifndef _MEMRAY_LOGGING_H 2 | #define _MEMRAY_LOGGING_H 3 | 4 | #include 5 | #include 6 | 7 | namespace memray { 8 | 9 | enum logLevel { 10 | NOTSET = 0, 11 | DEBUG = 10, 12 | INFO = 20, 13 | WARNING = 30, 14 | ERROR = 40, 15 | CRITICAL = 50, 16 | }; 17 | 18 | void 19 | logToStderr(const std::string& message, int level); 20 | 21 | void 22 | setLogThreshold(int threshold); 23 | 24 | logLevel 25 | getLogThreshold(); 26 | 27 | class LOG 28 | { 29 | public: 30 | // Constructors 31 | LOG() 32 | : msgLevel(INFO){}; 33 | 34 | explicit LOG(logLevel type) 35 | { 36 | msgLevel = type; 37 | }; 38 | 39 | // Destructors 40 | ~LOG() 41 | { 42 | logToStderr(buffer.str(), msgLevel); 43 | }; 44 | 45 | // Operators 46 | template 47 | LOG& operator<<(const T& msg) 48 | { 49 | if (msgLevel < getLogThreshold()) { 50 | return *this; 51 | } 52 | buffer << msg; 53 | return *this; 54 | }; 55 | 56 | private: 57 | // Data members 58 | std::ostringstream buffer; 59 | logLevel msgLevel = DEBUG; 60 | }; 61 | 62 | } // namespace memray 63 | 64 | #endif //_MEMRAY_LOGGING_H 65 | -------------------------------------------------------------------------------- /src/memray/_memray/logging.pxd: 
-------------------------------------------------------------------------------- 1 | cdef extern from "logging.h" namespace "memray": 2 | void setLogThreshold(int) 3 | -------------------------------------------------------------------------------- /src/memray/_memray/native_resolver.pxd: -------------------------------------------------------------------------------- 1 | from libcpp.string cimport string 2 | from libcpp.vector cimport vector 3 | 4 | 5 | cdef extern from "native_resolver.h" namespace "memray::native_resolver": 6 | vector[string] unwindHere() except+ 7 | -------------------------------------------------------------------------------- /src/memray/_memray/pthread.pxd: -------------------------------------------------------------------------------- 1 | cdef extern from "<pthread.h>" nogil: 2 | ctypedef int pthread_t 3 | 4 | ctypedef struct pthread_attr_t: 5 | pass 6 | ctypedef struct pthread_mutexattr_t: 7 | pass 8 | ctypedef struct pthread_mutex_t: 9 | pass 10 | 11 | enum: 12 | PTHREAD_CANCEL_ENABLE 13 | PTHREAD_CANCEL_DISABLE 14 | 15 | int pthread_cancel(pthread_t thread) 16 | int pthread_setcancelstate(int state, int *oldstate) 17 | pthread_t pthread_self() 18 | int pthread_equal(pthread_t t1, pthread_t t2) 19 | int pthread_create(pthread_t *thread, pthread_attr_t *attr, 20 | void *(*start_routine) (void *), void *arg) 21 | int pthread_join(pthread_t thread, void **retval) 22 | int pthread_kill(pthread_t thread, int sig) 23 | -------------------------------------------------------------------------------- /src/memray/_memray/python_helpers.cpp: -------------------------------------------------------------------------------- 1 | #include "python_helpers.h" 2 | 3 | namespace memray::python_helpers { 4 | PyObject* 5 | PyUnicode_Cache::getUnicodeObject(const std::string& str) 6 | { 7 | auto it = d_cache.find(str); 8 | if (it == d_cache.end()) { 9 | PyObject* pystring = PyUnicode_FromString(str.c_str()); 10 | if (pystring == nullptr) { 11 | return nullptr; 12 | } 13 | auto pystring_capsule = py_capsule_t(pystring, [](auto obj) { Py_DECREF(obj); }); 14 | it = d_cache.emplace(str, std::move(pystring_capsule)).first; 15 | } 16 | return it->second.get(); 17 | } 18 | } // namespace memray::python_helpers 19 | -------------------------------------------------------------------------------- /src/memray/_memray/python_helpers.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #define PY_SSIZE_T_CLEAN 4 | #include <Python.h> 5 | 6 | #include <functional> 7 | #include <memory> 8 | #include <string> 9 | #include <unordered_map> 10 | 11 | namespace memray::python_helpers { 12 | class PyUnicode_Cache 13 | { 14 | public: 15 | PyObject* getUnicodeObject(const std::string& str); 16 | 17 | private: 18 | using py_capsule_t = std::unique_ptr<PyObject, std::function<void(PyObject*)>>; 19 | std::unordered_map<std::string, py_capsule_t> d_cache{}; 20 | }; 21 | } // namespace memray::python_helpers 22 | -------------------------------------------------------------------------------- /src/memray/_memray/record_reader.pxd: -------------------------------------------------------------------------------- 1 | from _memray.records cimport AggregatedAllocation 2 | from _memray.records cimport Allocation 3 | from _memray.records cimport HeaderRecord 4 | from _memray.records cimport MemoryRecord 5 | from _memray.records cimport MemorySnapshot 6 | from _memray.records cimport optional_frame_id_t 7 | from _memray.source cimport Source 8 | from libcpp cimport bool 9 | from libcpp.memory cimport unique_ptr 10 | from libcpp.string cimport string 11 | from libcpp.vector cimport vector 12 | 13 
| 14 | cdef extern from "record_reader.h" namespace "memray::api": 15 | cdef enum RecordResult 'memray::api::RecordReader::RecordResult': 16 | RecordResultAllocationRecord 'memray::api::RecordReader::RecordResult::ALLOCATION_RECORD' 17 | RecordResultAggregatedAllocationRecord 'memray::api::RecordReader::RecordResult::AGGREGATED_ALLOCATION_RECORD' 18 | RecordResultMemoryRecord 'memray::api::RecordReader::RecordResult::MEMORY_RECORD' 19 | RecordResultMemorySnapshot 'memray::api::RecordReader::RecordResult::MEMORY_SNAPSHOT' 20 | RecordResultError 'memray::api::RecordReader::RecordResult::ERROR' 21 | RecordResultEndOfFile 'memray::api::RecordReader::RecordResult::END_OF_FILE' 22 | 23 | cdef cppclass RecordReader: 24 | RecordReader(unique_ptr[Source]) except+ 25 | RecordReader(unique_ptr[Source], bool track_stacks) except+ 26 | void close() 27 | bool isOpen() const 28 | RecordResult nextRecord() except+ 29 | object Py_GetStackFrame(unsigned int frame_id) except+ 30 | object Py_GetStackFrame(unsigned int frame_id, size_t max_stacks) except+ 31 | object Py_GetStackFrameAndEntryInfo( 32 | unsigned int frame_id, vector[unsigned char]* is_entry_frame 33 | ) except+ 34 | object Py_GetStackFrameAndEntryInfo( 35 | unsigned int frame_id, vector[unsigned char]* is_entry_frame, size_t max_stacks 36 | ) except+ 37 | object Py_GetNativeStackFrame(int frame_id, size_t generation) except+ 38 | object Py_GetNativeStackFrame(int frame_id, size_t generation, size_t max_stacks) except+ 39 | optional_frame_id_t getLatestPythonFrameId(const Allocation&) except+ 40 | object Py_GetFrame(optional_frame_id_t frame) except+ 41 | HeaderRecord getHeader() 42 | size_t getMainThreadTid() 43 | size_t getSkippedFramesOnMainThread() 44 | object dumpAllRecords() except+ 45 | string getThreadName(long int tid) except+ 46 | Allocation getLatestAllocation() 47 | MemoryRecord getLatestMemoryRecord() 48 | AggregatedAllocation getLatestAggregatedAllocation() 49 | MemorySnapshot getLatestMemorySnapshot() 50 | -------------------------------------------------------------------------------- /src/memray/_memray/record_writer.pxd: -------------------------------------------------------------------------------- 1 | from _memray.records cimport FileFormat 2 | from _memray.sink cimport Sink 3 | from libcpp cimport bool 4 | from libcpp.memory cimport unique_ptr 5 | from libcpp.string cimport string 6 | 7 | 8 | cdef extern from "record_writer.h" namespace "memray::api": 9 | cdef cppclass RecordWriter: 10 | pass 11 | cdef unique_ptr[RecordWriter] createRecordWriter( 12 | unique_ptr[Sink], 13 | string command_line, 14 | bool native_trace, 15 | FileFormat file_format, 16 | bool trace_python_allocators, 17 | ) except+ 18 | -------------------------------------------------------------------------------- /src/memray/_memray/records.pxd: -------------------------------------------------------------------------------- 1 | from _memray.hooks cimport Allocator 2 | from libc.stdint cimport uintptr_t 3 | from libcpp cimport bool 4 | from libcpp.string cimport string 5 | from libcpp.vector cimport vector 6 | 7 | 8 | cdef extern from "records.h" namespace "memray::tracking_api": 9 | ctypedef unsigned long thread_id_t 10 | ctypedef size_t frame_id_t 11 | 12 | struct Frame: 13 | string function_name 14 | string filename 15 | int lineno 16 | 17 | struct TrackerStats: 18 | size_t n_allocations 19 | size_t n_frames 20 | long long start_time 21 | long long end_time 22 | 23 | cdef enum FileFormat: 24 | ALL_ALLOCATIONS 
'memray::tracking_api::FileFormat::ALL_ALLOCATIONS' 25 | AGGREGATED_ALLOCATIONS 'memray::tracking_api::FileFormat::AGGREGATED_ALLOCATIONS' 26 | 27 | struct HeaderRecord: 28 | int version 29 | bool native_traces 30 | FileFormat file_format 31 | TrackerStats stats 32 | string command_line 33 | int pid 34 | size_t main_tid 35 | size_t skipped_frames_on_main_tid 36 | int python_allocator 37 | bool trace_python_allocators 38 | 39 | cdef cppclass Allocation: 40 | thread_id_t tid 41 | uintptr_t address 42 | size_t size 43 | Allocator allocator 44 | frame_id_t native_frame_id 45 | size_t frame_index 46 | size_t native_segment_generation 47 | size_t n_allocations 48 | 49 | object toPythonObject() 50 | 51 | cdef cppclass AggregatedAllocation: 52 | thread_id_t tid 53 | Allocator allocator 54 | frame_id_t native_frame_id 55 | size_t frame_index 56 | size_t native_segment_generation 57 | 58 | size_t n_allocations_in_high_water_mark 59 | size_t n_allocations_leaked 60 | size_t bytes_in_high_water_mark 61 | size_t bytes_leaked 62 | 63 | Allocation contributionToHighWaterMark() 64 | Allocation contributionToLeaks() 65 | 66 | struct MemoryRecord: 67 | unsigned long int ms_since_epoch 68 | size_t rss 69 | 70 | struct MemorySnapshot: 71 | unsigned long int ms_since_epoch 72 | size_t rss 73 | size_t heap 74 | 75 | 76 | cdef extern from "": 77 | # Cython doesn't have libcpp.optional yet, so just declare this opaquely. 78 | cdef cppclass optional_frame_id_t "std::optional": 79 | pass 80 | -------------------------------------------------------------------------------- /src/memray/_memray/sink.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | #include 5 | #include 6 | #include 7 | 8 | #include "records.h" 9 | 10 | namespace memray::io { 11 | 12 | class Sink 13 | { 14 | public: 15 | virtual ~Sink(){}; 16 | virtual bool writeAll(const char* data, size_t length) = 0; 17 | virtual bool seek(off_t offset, int whence) = 0; 18 | virtual std::unique_ptr cloneInChildProcess() = 0; 19 | virtual bool flush() 20 | { 21 | return true; 22 | } 23 | }; 24 | 25 | class FileSink : public memray::io::Sink 26 | { 27 | public: 28 | FileSink(const std::string& file_name, bool overwrite, bool compress); 29 | ~FileSink() override; 30 | FileSink(FileSink&) = delete; 31 | FileSink(FileSink&&) = delete; 32 | void operator=(const FileSink&) = delete; 33 | void operator=(const FileSink&&) = delete; 34 | 35 | bool writeAll(const char* data, size_t length) override; 36 | bool seek(off_t offset, int whence) override; 37 | std::unique_ptr cloneInChildProcess() override; 38 | 39 | private: 40 | void compress() noexcept; 41 | bool grow(size_t needed); 42 | bool slideWindow(); 43 | size_t bytesBeyondBufferNeedle(); 44 | 45 | std::string d_filename; 46 | std::string d_fileNameStem; 47 | bool d_compress{1}; 48 | int d_fd{-1}; 49 | size_t d_fileSize{0}; 50 | const size_t BUFFER_SIZE{16 * 1024 * 1024}; // 16 MiB 51 | size_t d_bufferOffset{0}; 52 | char* d_buffer{nullptr}; 53 | char* d_bufferEnd{nullptr}; // exclusive 54 | char* d_bufferNeedle{nullptr}; 55 | }; 56 | 57 | class SocketSink : public Sink 58 | { 59 | public: 60 | explicit SocketSink(std::string host, uint16_t port); 61 | ~SocketSink() override; 62 | 63 | SocketSink(SocketSink&) = delete; 64 | SocketSink(SocketSink&&) = delete; 65 | void operator=(const SocketSink&) = delete; 66 | void operator=(const SocketSink&&) = delete; 67 | 68 | bool writeAll(const char* data, size_t length) override; 69 | bool seek(off_t offset, 
int whence) override; 70 | std::unique_ptr cloneInChildProcess() override; 71 | bool flush() override; 72 | 73 | private: 74 | size_t freeSpaceInBuffer(); 75 | void open(); 76 | bool _flush(); 77 | 78 | const std::string d_host; 79 | uint16_t d_port; 80 | int d_socket_fd{-1}; 81 | bool d_socket_open{false}; 82 | 83 | const size_t BUFFER_SIZE{PIPE_BUF}; 84 | std::unique_ptr d_buffer{nullptr}; 85 | char* d_bufferNeedle{nullptr}; 86 | }; 87 | 88 | class NullSink : public Sink 89 | { 90 | public: 91 | ~NullSink() override; 92 | bool writeAll(const char* data, size_t length) override; 93 | bool seek(off_t offset, int whence) override; 94 | std::unique_ptr cloneInChildProcess() override; 95 | }; 96 | 97 | } // namespace memray::io 98 | -------------------------------------------------------------------------------- /src/memray/_memray/sink.pxd: -------------------------------------------------------------------------------- 1 | from libc.stdint cimport int16_t 2 | from libcpp cimport bool 3 | from libcpp.string cimport string 4 | 5 | 6 | cdef extern from "sink.h" namespace "memray::io": 7 | cdef cppclass Sink: 8 | pass 9 | 10 | cdef cppclass FileSink(Sink): 11 | FileSink(const string& file_name, bool overwrite, bool compress) except +IOError 12 | 13 | cdef cppclass SocketSink(Sink): 14 | SocketSink(string host, unsigned int port) except +IOError 15 | 16 | cdef cppclass NullSink(Sink): 17 | NullSink() except +IOError 18 | -------------------------------------------------------------------------------- /src/memray/_memray/socket_reader_thread.cpp: -------------------------------------------------------------------------------- 1 | #include "socket_reader_thread.h" 2 | 3 | #include 4 | 5 | namespace memray::socket_thread { 6 | 7 | void 8 | BackgroundSocketReader::backgroundThreadWorker() 9 | { 10 | while (true) { 11 | if (d_stop_thread) { 12 | break; 13 | } 14 | 15 | const auto record_type = d_record_reader->nextRecord(); 16 | 17 | if (d_stop_thread) { 18 | break; 19 | } 20 | 21 | switch (record_type) { 22 | case RecordResult::ALLOCATION_RECORD: { 23 | std::lock_guard lock(d_mutex); 24 | d_aggregator.addAllocation(d_record_reader->getLatestAllocation()); 25 | } break; 26 | 27 | case RecordResult::MEMORY_RECORD: { 28 | } break; 29 | 30 | case RecordResult::AGGREGATED_ALLOCATION_RECORD: { 31 | // This should never happen. We checked the source format in 32 | // the constructor, and RecordReader should never return 33 | // records that don't match the source format. 34 | std::cerr << "BUG: AGGREGATED_ALLOCATION_RECORD from ALL_ALLOCATIONS input" << std::endl; 35 | abort(); 36 | } break; 37 | 38 | case RecordResult::MEMORY_SNAPSHOT: { 39 | // As above. 
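                // MEMORY_SNAPSHOT records only appear in AGGREGATED_ALLOCATIONS
                // capture files, and the constructor already rejected those.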
40 | std::cerr << "BUG: MEMORY_SNAPSHOT from ALL_ALLOCATIONS input" << std::endl; 41 | abort(); 42 | } break; 43 | 44 | case RecordResult::END_OF_FILE: 45 | case RecordResult::ERROR: { 46 | d_stop_thread = true; 47 | } break; 48 | } 49 | } 50 | } 51 | 52 | BackgroundSocketReader::BackgroundSocketReader(std::shared_ptr reader) 53 | : d_record_reader(reader) 54 | { 55 | if (d_record_reader->getHeader().file_format != api::FileFormat::ALL_ALLOCATIONS) { 56 | throw std::runtime_error("BackgroundSocketReader only supports ALL_ALLOCATIONS"); 57 | } 58 | } 59 | 60 | void 61 | BackgroundSocketReader::start() 62 | { 63 | d_thread = std::thread(&BackgroundSocketReader::backgroundThreadWorker, this); 64 | } 65 | 66 | BackgroundSocketReader::~BackgroundSocketReader() 67 | { 68 | d_record_reader->close(); 69 | d_stop_thread = true; 70 | d_thread.join(); 71 | } 72 | 73 | PyObject* 74 | BackgroundSocketReader::Py_GetSnapshotAllocationRecords(bool merge_threads) 75 | { 76 | api::reduced_snapshot_map_t stack_to_allocation; 77 | { 78 | std::lock_guard lock(d_mutex); 79 | stack_to_allocation = d_aggregator.getSnapshotAllocations(merge_threads); 80 | } 81 | 82 | return api::Py_ListFromSnapshotAllocationRecords(stack_to_allocation); 83 | } 84 | 85 | bool 86 | BackgroundSocketReader::is_active() const 87 | { 88 | return !d_stop_thread; 89 | } 90 | 91 | } // namespace memray::socket_thread 92 | -------------------------------------------------------------------------------- /src/memray/_memray/socket_reader_thread.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #define PY_SSIZE_T_CLEAN 4 | #include 5 | 6 | #include 7 | #include 8 | #include 9 | #include 10 | 11 | #include "record_reader.h" 12 | #include "snapshot.h" 13 | 14 | namespace memray::socket_thread { 15 | 16 | class BackgroundSocketReader 17 | { 18 | private: 19 | using RecordResult = api::RecordReader::RecordResult; 20 | 21 | std::atomic d_stop_thread{false}; 22 | std::mutex d_mutex; 23 | std::shared_ptr d_record_reader; 24 | 25 | api::SnapshotAllocationAggregator d_aggregator; 26 | std::thread d_thread; 27 | 28 | void backgroundThreadWorker(); 29 | 30 | public: 31 | BackgroundSocketReader(BackgroundSocketReader& other) = delete; 32 | BackgroundSocketReader(BackgroundSocketReader&& other) = delete; 33 | void operator=(const BackgroundSocketReader&) = delete; 34 | void operator=(BackgroundSocketReader&&) = delete; 35 | 36 | explicit BackgroundSocketReader(std::shared_ptr reader); 37 | ~BackgroundSocketReader(); 38 | 39 | void start(); 40 | bool is_active() const; 41 | PyObject* Py_GetSnapshotAllocationRecords(bool merge_threads); 42 | }; 43 | 44 | } // namespace memray::socket_thread 45 | -------------------------------------------------------------------------------- /src/memray/_memray/socket_reader_thread.pxd: -------------------------------------------------------------------------------- 1 | from _memray.record_reader cimport RecordReader 2 | from libcpp cimport bool 3 | from libcpp cimport int 4 | from libcpp.memory cimport shared_ptr 5 | 6 | 7 | cdef extern from "socket_reader_thread.h" namespace "memray::socket_thread": 8 | cdef cppclass BackgroundSocketReader: 9 | BackgroundSocketReader(shared_ptr[RecordReader]) except+ 10 | 11 | void start() except+ 12 | bool is_active() 13 | object Py_GetSnapshotAllocationRecords(bool merge_threads) 14 | -------------------------------------------------------------------------------- /src/memray/_memray/source.h: 
-------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | 9 | #include "lz4_stream.h" 10 | 11 | namespace memray::io { 12 | 13 | const int MAX_BUF_SIZE = 4096; 14 | 15 | class Source 16 | { 17 | public: 18 | virtual ~Source(){}; 19 | virtual void close() = 0; 20 | virtual bool is_open() = 0; 21 | virtual bool read(char* result, ssize_t length) = 0; 22 | virtual bool getline(std::string& result, char delimiter) = 0; 23 | }; 24 | 25 | class FileSource : public Source 26 | { 27 | public: 28 | FileSource(FileSource& other) = delete; 29 | FileSource(FileSource&& other) = delete; 30 | void operator=(const FileSource&) = delete; 31 | void operator=(FileSource&&) = delete; 32 | 33 | FileSource(const std::string& file_name); 34 | ~FileSource() override; 35 | void close() override; 36 | bool is_open() override; 37 | bool read(char* result, ssize_t length) override; 38 | bool getline(std::string& result, char delimiter) override; 39 | 40 | private: 41 | void _close(); 42 | void findReadableSize(); 43 | const std::string& d_file_name; 44 | std::shared_ptr d_raw_stream; 45 | std::shared_ptr d_stream; 46 | std::streamoff d_readable_size{}; 47 | std::streamoff d_bytes_read{}; 48 | }; 49 | 50 | class SocketBuf : public std::streambuf 51 | { 52 | public: 53 | explicit SocketBuf(int socket_fd); 54 | void close(); 55 | 56 | private: 57 | int underflow() override; 58 | std::streamsize xsgetn(char_type* s, std::streamsize n) override; 59 | int d_sockfd{-1}; 60 | char d_buf[MAX_BUF_SIZE]; 61 | std::atomic d_open{true}; 62 | }; 63 | 64 | class SocketSource : public Source 65 | { 66 | public: 67 | SocketSource(SocketSource& other) = delete; 68 | SocketSource(SocketSource&& other) = delete; 69 | void operator=(const SocketSource&) = delete; 70 | void operator=(SocketSource&&) = delete; 71 | 72 | SocketSource(int port); 73 | ~SocketSource() override; 74 | void close() override; 75 | bool is_open() override; 76 | bool read(char* result, ssize_t length) override; 77 | bool getline(std::string& result, char delimiter) override; 78 | 79 | private: 80 | void _close(); 81 | int d_sockfd{-1}; 82 | std::atomic d_is_open{false}; 83 | std::unique_ptr d_socket_buf; 84 | }; 85 | 86 | } // namespace memray::io 87 | -------------------------------------------------------------------------------- /src/memray/_memray/source.pxd: -------------------------------------------------------------------------------- 1 | from libcpp cimport bool 2 | from libcpp.string cimport string 3 | 4 | 5 | cdef extern from "source.h" namespace "memray::io": 6 | cdef cppclass Source: 7 | pass 8 | 9 | cdef cppclass FileSource(Source): 10 | FileSource(const string& file_name) except+ IOError 11 | 12 | cdef cppclass SocketSource(Source): 13 | SocketSource(int port) except+ IOError 14 | -------------------------------------------------------------------------------- /src/memray/_memray/tracking_api.pxd: -------------------------------------------------------------------------------- 1 | from _memray.record_writer cimport RecordWriter 2 | from libc.stdint cimport uint64_t 3 | from libcpp cimport bool 4 | from libcpp.memory cimport unique_ptr 5 | from libcpp.string cimport string 6 | 7 | 8 | cdef extern from "tracking_api.h" namespace "memray::tracking_api": 9 | void set_up_pthread_fork_handlers() except+ 10 | void install_trace_function() except* 11 | 12 | cdef cppclass Tracker: 13 | @staticmethod 14 | object createTracker( 15 | 
unique_ptr[RecordWriter] record_writer, 16 | bool native_traces, 17 | unsigned int memory_interval, 18 | bool follow_fork, 19 | bool trace_pymalloc, 20 | ) except+ 21 | 22 | @staticmethod 23 | object destroyTracker() except + 24 | 25 | @staticmethod 26 | Tracker* getTracker() 27 | 28 | @staticmethod 29 | void forgetPythonStack() except+ 30 | 31 | @staticmethod 32 | void beginTrackingGreenlets() except+ 33 | 34 | @staticmethod 35 | void handleGreenletSwitch(object, object) except+ 36 | 37 | @staticmethod 38 | void registerThreadNameById(uint64_t, const char*) except+ 39 | 40 | @staticmethod 41 | void childFork() noexcept nogil 42 | -------------------------------------------------------------------------------- /src/memray/_metadata.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import typing 4 | from dataclasses import dataclass 5 | from datetime import datetime 6 | 7 | if typing.TYPE_CHECKING: 8 | from ._memray import FileFormat 9 | 10 | 11 | @dataclass 12 | class Metadata: 13 | start_time: datetime 14 | end_time: datetime 15 | total_allocations: int 16 | total_frames: int 17 | peak_memory: int 18 | command_line: str 19 | pid: int 20 | main_thread_id: int 21 | python_allocator: str 22 | has_native_traces: bool 23 | trace_python_allocators: bool 24 | file_format: FileFormat 25 | -------------------------------------------------------------------------------- /src/memray/_stats.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | from ._metadata import Metadata 4 | 5 | 6 | @dataclass 7 | class Stats: 8 | metadata: Metadata 9 | total_num_allocations: int 10 | total_memory_allocated: int 11 | peak_memory_allocated: int 12 | allocation_count_by_size: dict 13 | allocation_count_by_allocator: dict 14 | top_locations_by_size: list 15 | top_locations_by_count: list 16 | -------------------------------------------------------------------------------- /src/memray/_stats.pyi: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | from ._memray import PythonStackElement 4 | from ._metadata import Metadata 5 | 6 | @dataclass 7 | class Stats: 8 | metadata: Metadata 9 | total_num_allocations: int 10 | total_memory_allocated: int 11 | peak_memory_allocated: int 12 | allocation_count_by_size: dict[int, int] 13 | allocation_count_by_allocator: dict[str, int] 14 | top_locations_by_size: list[tuple[PythonStackElement, int]] 15 | top_locations_by_count: list[tuple[PythonStackElement, int]] 16 | -------------------------------------------------------------------------------- /src/memray/_test.py: -------------------------------------------------------------------------------- 1 | from typing import Callable 2 | 3 | from ._test_utils import MemoryAllocator as _MemoryAllocator 4 | from ._test_utils import MmapAllocator 5 | from ._test_utils import PrimeCaches 6 | from ._test_utils import PymallocDomain 7 | from ._test_utils import PymallocMemoryAllocator 8 | from ._test_utils import _cython_allocate_in_two_places 9 | from ._test_utils import _cython_nested_allocation 10 | from ._test_utils import allocate_cpp_vector 11 | from ._test_utils import allocate_without_gil_held 12 | from ._test_utils import exit 13 | from ._test_utils import fill_cpp_vector 14 | from ._test_utils import function_caller 15 | from ._test_utils import set_thread_name 16 | 17 | 18 | class MemoryAllocator: 19 | 
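    # Thin pure-Python wrapper around the Cython ``_MemoryAllocator``,
    # presumably kept in Python so each allocation helper appears as a regular
    # Python frame in captured stack traces that tests can assert against.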
def __init__(self) -> None: 20 | self.allocator = _MemoryAllocator() 21 | 22 | def free(self) -> None: 23 | return self.allocator.free() 24 | 25 | def malloc(self, size: int) -> bool: 26 | return self.allocator.malloc(size) 27 | 28 | def calloc(self, size: int) -> bool: 29 | return self.allocator.calloc(size) 30 | 31 | def realloc(self, size: int) -> bool: 32 | return self.allocator.realloc(size) 33 | 34 | def posix_memalign(self, size: int) -> bool: 35 | return self.allocator.posix_memalign(size) 36 | 37 | def aligned_alloc(self, size: int) -> bool: 38 | return self.allocator.aligned_alloc(size) 39 | 40 | def memalign(self, size: int) -> bool: 41 | return self.allocator.memalign(size) 42 | 43 | def valloc(self, size: int) -> bool: 44 | return self.allocator.valloc(size) 45 | 46 | def pvalloc(self, size: int) -> bool: 47 | return self.allocator.pvalloc(size) 48 | 49 | def run_in_pthread(self, callback: Callable[[], None]) -> None: 50 | return self.allocator.run_in_pthread(callback) 51 | 52 | 53 | __all__ = [ 54 | "allocate_cpp_vector", 55 | "MemoryAllocator", 56 | "MmapAllocator", 57 | "PymallocDomain", 58 | "PymallocMemoryAllocator", 59 | "_cython_allocate_in_two_places", 60 | "_cython_nested_allocation", 61 | "allocate_without_gil_held", 62 | "function_caller", 63 | "set_thread_name", 64 | "fill_cpp_vector", 65 | "exit", 66 | "PrimeCaches", 67 | ] 68 | -------------------------------------------------------------------------------- /src/memray/_test_utils.pyi: -------------------------------------------------------------------------------- 1 | from typing import Callable 2 | 3 | from ._memray import PymallocDomain as PymallocDomain 4 | 5 | class MemoryAllocator: 6 | def __init__(self) -> None: ... 7 | def free(self) -> None: ... 8 | def malloc(self, size: int) -> bool: ... 9 | def calloc(self, size: int) -> bool: ... 10 | def realloc(self, size: int) -> bool: ... 11 | def posix_memalign(self, size: int) -> bool: ... 12 | def aligned_alloc(self, size: int) -> bool: ... 13 | def memalign(self, size: int) -> bool: ... 14 | def valloc(self, size: int) -> bool: ... 15 | def pvalloc(self, size: int) -> bool: ... 16 | def run_in_pthread(self, callback: Callable[[], None]) -> None: ... 17 | 18 | class PymallocMemoryAllocator: 19 | def __init__(self, domain: PymallocDomain) -> None: ... 20 | def free(self) -> None: ... 21 | def malloc(self, size: int) -> None: ... 22 | def calloc(self, size: int) -> None: ... 23 | def realloc(self, size: int) -> None: ... 24 | 25 | class MmapAllocator: 26 | def __init__(self, size: int, address: int = 0) -> None: ... 27 | @property 28 | def address(self) -> int: ... 29 | def munmap(self, length: int, offset: int = 0) -> None: ... 30 | 31 | def _cython_nested_allocation( 32 | allocator_fn: Callable[[int], None], size: int 33 | ) -> None: ... 34 | def _cython_allocate_in_two_places(size: int) -> None: ... 35 | def set_thread_name(name: str) -> int: ... 36 | def function_caller(func: Callable[[], None]) -> None: ... 37 | def allocate_without_gil_held(wake_up_main_fd: int, wake_up_thread_fd: int) -> None: ... 38 | def allocate_cpp_vector(size: int) -> int: ... 39 | def fill_cpp_vector(size: int) -> int: ... 40 | def exit(py_finalize: bool = False) -> None: ... 41 | 42 | class PrimeCaches: 43 | def __init__(self, size: int) -> None: ... 44 | def __enter__(self) -> None: ... 45 | def __exit__(self, *args: object) -> None: ... 
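# A short usage sketch for these test helpers (illustrative only; the capture
# file name and allocation size are arbitrary):
#
#     from memray import FileReader, Tracker
#     from memray._test import MemoryAllocator
#
#     allocator = MemoryAllocator()
#     with Tracker("example.bin"):
#         allocator.valloc(1024 * 1024)
#         allocator.free()
#     records = list(FileReader("example.bin").get_allocation_records())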
46 | -------------------------------------------------------------------------------- /src/memray/_thread_name_interceptor.py: -------------------------------------------------------------------------------- 1 | import threading 2 | from typing import Callable 3 | 4 | 5 | class ThreadNameInterceptor: 6 | """Record the name of each threading.Thread for Memray's reports. 7 | 8 | The name can be set either before or after the thread is started, and from 9 | either the same thread or a different thread. Whenever an assignment to 10 | either `Thread._name` or `Thread._ident` is performed and the other has 11 | already been set, we call a callback with the thread's ident and name. 12 | """ 13 | 14 | def __init__(self, attr: str, callback: Callable[[int, str], None]) -> None: 15 | self._attr = attr 16 | self._callback = callback 17 | 18 | def __set__(self, instance: threading.Thread, value: object) -> None: 19 | instance.__dict__[self._attr] = value 20 | ident = instance.__dict__.get("_ident") 21 | name = instance.__dict__.get("_name") 22 | if ident is not None and name is not None: 23 | self._callback(ident, name) 24 | -------------------------------------------------------------------------------- /src/memray/_version.py: -------------------------------------------------------------------------------- 1 | __version__ = "1.17.2" 2 | -------------------------------------------------------------------------------- /src/memray/commands/_attach.gdb: -------------------------------------------------------------------------------- 1 | p "MEMRAY: Attached to process." 2 | 3 | set unwindonsignal on 4 | sharedlibrary libc 5 | sharedlibrary libdl 6 | sharedlibrary musl 7 | sharedlibrary libpython 8 | info sharedlibrary 9 | 10 | p "MEMRAY: Checking if process is Python 3.7+." 11 | 12 | p PyMem_Malloc 13 | p PyMem_Calloc 14 | p PyMem_Realloc 15 | p PyMem_Free 16 | 17 | p "MEMRAY: Process is Python 3.7+." 18 | set scheduler-locking on 19 | call (int)Py_AddPendingCall(&PyCallable_Check, (void*)0) 20 | 21 | # When updating this list, also update the "commands" call below, 22 | # and the breakpoints hardcoded for lldb in attach.py 23 | b malloc 24 | b calloc 25 | b realloc 26 | b free 27 | b PyMem_Malloc 28 | b PyMem_Calloc 29 | b PyMem_Realloc 30 | b PyMem_Free 31 | b PyErr_CheckSignals 32 | b PyCallable_Check 33 | # Apply commands to all 10 breakpoints above 34 | commands 1-10 35 | bt 36 | disable breakpoints 37 | delete breakpoints 38 | call (void*)dlopen($libpath, $rtld_now) 39 | p (char*)dlerror() 40 | eval "sharedlibrary %s", $libpath 41 | p (int)memray_spawn_client($port) ? "FAILURE" : "SUCCESS" 42 | end 43 | set scheduler-locking off 44 | continue 45 | -------------------------------------------------------------------------------- /src/memray/commands/_attach.lldb: -------------------------------------------------------------------------------- 1 | p "MEMRAY: Attached to process." 2 | 3 | p "MEMRAY: Checking if process is Python 3.7+." 4 | 5 | p ((void*(*)(size_t))PyMem_Malloc) 6 | p ((void*(*)(size_t, size_t))PyMem_Calloc) 7 | p ((void*(*)(void *, size_t))PyMem_Realloc) 8 | p ((void(*)(void*))PyMem_Free) 9 | 10 | p "MEMRAY: Process is Python 3.7+." 
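# As in _attach.gdb: stop the process the next time it enters one of the
# allocator functions below, which is presumably a safe point from which to
# dlopen() the memray extension and call memray_spawn_client() from the
# debugger.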
11 | 12 | # When adding new breakpoints, also update _attach.gdb 13 | breakpoint set -b malloc -b calloc -b realloc -b free -b PyMem_Malloc -b PyMem_Calloc -b PyMem_Realloc -b PyMem_Free 14 | 15 | # Set commands to execute when breakpoint is reached 16 | breakpoint command add -e true 17 | breakpoint disable 18 | expr auto $dlsym = (void* (*)(void*, const char*))&::dlsym 19 | expr auto $dlopen = $dlsym($rtld_default, "dlopen") 20 | expr auto $dlerror = $dlsym($rtld_default, "dlerror") 21 | expr auto $dll = ((void*(*)(const char*, int))$dlopen)($libpath, $rtld_now) 22 | p ((char*(*)(void))$dlerror)() 23 | expr auto $spawn = $dlsym($dll, "memray_spawn_client") 24 | p ((int(*)(int))$spawn)($port)?"FAILURE":"SUCCESS" 25 | DONE 26 | 27 | continue 28 | -------------------------------------------------------------------------------- /src/memray/commands/flamegraph.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | 3 | from ..reporters.flamegraph import FlameGraphReporter 4 | from .common import HighWatermarkCommand 5 | 6 | 7 | class FlamegraphCommand(HighWatermarkCommand): 8 | """Generate an HTML flame graph for peak memory usage""" 9 | 10 | def __init__(self) -> None: 11 | super().__init__( 12 | reporter_factory=FlameGraphReporter.from_snapshot, 13 | temporal_reporter_factory=FlameGraphReporter.from_temporal_snapshot, 14 | reporter_name="flamegraph", 15 | ) 16 | 17 | def prepare_parser(self, parser: argparse.ArgumentParser) -> None: 18 | super().prepare_parser(parser) 19 | parser.add_argument( 20 | "--split-threads", 21 | help="Do not merge allocations across threads", 22 | action="store_true", 23 | default=False, 24 | ) 25 | 26 | parser.add_argument( 27 | "--inverted", 28 | help="Invert flame graph", 29 | action="store_true", 30 | default=False, 31 | ) 32 | 33 | parser.add_argument( 34 | "--max-memory-records", 35 | help="Maximum number of memory records to display", 36 | type=int, 37 | default=None, 38 | ) 39 | -------------------------------------------------------------------------------- /src/memray/commands/live.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | from contextlib import suppress 3 | from typing import Optional 4 | 5 | from memray import SocketReader 6 | from memray._errors import MemrayCommandError 7 | from memray.reporters.tui import TUIApp 8 | 9 | 10 | class LiveCommand: 11 | """Remotely monitor allocations in a text-based interface""" 12 | 13 | def prepare_parser(self, parser: argparse.ArgumentParser) -> None: 14 | parser.add_argument( 15 | "port", 16 | help="Remote port to connect to", 17 | default=None, 18 | type=int, 19 | ) 20 | 21 | def run(self, args: argparse.Namespace, parser: argparse.ArgumentParser) -> None: 22 | with suppress(KeyboardInterrupt): 23 | self.start_live_interface(args.port) 24 | 25 | def start_live_interface( 26 | self, port: int, cmdline_override: Optional[str] = None 27 | ) -> None: 28 | if port >= 2**16 or port <= 0: 29 | raise MemrayCommandError(f"Invalid port: {port}", exit_code=1) 30 | with SocketReader(port=port) as reader: 31 | TUIApp(reader, cmdline_override=cmdline_override).run() 32 | -------------------------------------------------------------------------------- /src/memray/commands/parse.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import os 3 | 4 | from memray import dump_all_records 5 | from memray._errors import MemrayCommandError 6 | 7 | 8 | class 
ParseCommand: 9 | """Debug a results file by parsing and printing each record in it""" 10 | 11 | def prepare_parser(self, parser: argparse.ArgumentParser) -> None: 12 | parser.add_argument("results", help="Results of the tracker run") 13 | 14 | def run(self, args: argparse.Namespace, parser: argparse.ArgumentParser) -> None: 15 | if os.isatty(1): 16 | raise MemrayCommandError( 17 | "You must redirect stdout to a file or shell pipeline.", 18 | exit_code=1, 19 | ) 20 | 21 | try: 22 | dump_all_records(args.results) 23 | except OSError as e: 24 | raise MemrayCommandError( 25 | f"Failed to parse allocation records in {args.results}\nReason: {e}", 26 | exit_code=1, 27 | ) 28 | -------------------------------------------------------------------------------- /src/memray/commands/table.py: -------------------------------------------------------------------------------- 1 | from ..reporters.table import TableReporter 2 | from .common import HighWatermarkCommand 3 | 4 | 5 | class TableCommand(HighWatermarkCommand): 6 | """Generate an HTML table with all records in the peak memory usage""" 7 | 8 | def __init__(self) -> None: 9 | super().__init__( 10 | reporter_factory=TableReporter.from_snapshot, 11 | reporter_name="table", 12 | ) 13 | -------------------------------------------------------------------------------- /src/memray/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/src/memray/py.typed -------------------------------------------------------------------------------- /src/memray/reporters/__init__.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from typing import TextIO 3 | 4 | from memray import Metadata 5 | 6 | if sys.version_info >= (3, 8): 7 | from typing import Protocol 8 | else: 9 | from typing_extensions import Protocol 10 | 11 | 12 | class BaseReporter(Protocol): 13 | def render( 14 | self, 15 | outfile: TextIO, 16 | metadata: Metadata, 17 | show_memory_leaks: bool, 18 | merge_threads: bool, 19 | inverted: bool, 20 | ) -> None: 21 | ... 22 | -------------------------------------------------------------------------------- /src/memray/reporters/_textual_hacks.py: -------------------------------------------------------------------------------- 1 | import dataclasses 2 | from typing import Any 3 | from typing import Dict 4 | from typing import Tuple 5 | from typing import Union 6 | 7 | from textual import binding 8 | from textual.app import App 9 | from textual.binding import Binding 10 | from textual.dom import DOMNode 11 | from textual.widgets import Footer 12 | 13 | # In Textual 0.61, `App.namespace_bindings` was removed in favor of 14 | # `Screen.active_bindings`. The two have a slightly different interface: 15 | # a 2 item `tuple` was updated to a 3 item `namedtuple`. 16 | # The `Bindings` type alias shows the two possible structures. 17 | # The `update_key_description` implementation works for both, 18 | # since we still support Textual versions below 0.61. 
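# Roughly (shapes are illustrative, not exact):
#   Textual < 0.61:  bindings[key] -> (node, Binding(...))              # plain tuple
#   Textual >= 0.61: bindings[key] -> ActiveBinding(node, binding, ...) # namedtuple
# `update_key_description` below only relies on index [1] holding the Binding,
# which is true for both shapes.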
19 | 20 | Bindings = Union[Dict[str, "binding.ActiveBinding"], Dict[str, Tuple[DOMNode, Binding]]] 21 | 22 | 23 | def update_key_description(bindings: Bindings, key: str, description: str) -> None: 24 | val = bindings[key] 25 | binding = dataclasses.replace(val[1], description=description) 26 | if type(val) is tuple: 27 | bindings[key] = val[:1] + (binding,) + val[2:] # type: ignore 28 | else: 29 | bindings[key] = val._replace(binding=binding) # type: ignore 30 | 31 | 32 | def redraw_footer(app: App[Any]) -> None: 33 | footer = app.screen.query_one(Footer) 34 | if hasattr(footer, "recompose"): 35 | # Added in Textual v0.53 36 | footer.refresh(recompose=True) 37 | else: # pragma: no cover 38 | # Hack: trick the Footer into redrawing itself 39 | footer.highlight_key = "q" # type: ignore[attr-defined] 40 | footer.highlight_key = None # type: ignore[attr-defined] 41 | -------------------------------------------------------------------------------- /src/memray/reporters/assets/README.md: -------------------------------------------------------------------------------- 1 | # Uncompiled JS assets 2 | 3 | These files are compiled using webpack into the `templates/assets` directory. 4 | These sources are included in the source distribution as well as the wheel 5 | distribution. The wheel distribution also contains the compiled JS files. 6 | -------------------------------------------------------------------------------- /src/memray/reporters/assets/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/src/memray/reporters/assets/__init__.py -------------------------------------------------------------------------------- /src/memray/reporters/assets/table.js: -------------------------------------------------------------------------------- 1 | import { humanFileSize, initMemoryGraph, resizeMemoryGraph } from "./common"; 2 | window.resizeMemoryGraph = resizeMemoryGraph; 3 | 4 | function main() { 5 | data = packed_data; 6 | 7 | initMemoryGraph(memory_records); 8 | 9 | const columns = [ 10 | { 11 | title: "Thread ID", 12 | data: "tid", 13 | }, 14 | { 15 | title: "Size", 16 | data: "size", 17 | render: function (data, type, row, meta) { 18 | if (type === "sort" || type === "type") { 19 | return data; 20 | } 21 | 22 | return humanFileSize(data); 23 | }, 24 | }, 25 | { 26 | title: "Allocator", 27 | data: "allocator", 28 | }, 29 | { 30 | title: "Allocations", 31 | data: "n_allocations", 32 | }, 33 | { 34 | title: "Location", 35 | data: "stack_trace", 36 | }, 37 | ]; 38 | 39 | var table = $("#the_table").DataTable({ 40 | data: data, 41 | columns: columns, 42 | order: [[2, "desc"]], 43 | pageLength: 100, 44 | dom: "ip", 45 | }); 46 | const searchButton = $("#searchTerm"); 47 | searchButton.on("input", () => { 48 | const searchTerm = $("#searchTerm").val(); 49 | table.search(searchTerm).draw(); 50 | }); 51 | // Enable tooltips 52 | $('[data-toggle-second="tooltip"]').tooltip(); 53 | $('[data-toggle="tooltip"]').tooltip(); 54 | } 55 | 56 | document.addEventListener("DOMContentLoaded", main); 57 | resizeMemoryGraph(); 58 | -------------------------------------------------------------------------------- /src/memray/reporters/common.py: -------------------------------------------------------------------------------- 1 | from typing import Union 2 | 3 | from memray._memray import AllocationRecord 4 | from memray._memray import TemporalAllocationRecord 5 | 6 | 7 | def format_thread_name( 
8 | record: Union[AllocationRecord, TemporalAllocationRecord] 9 | ) -> str: 10 | if record.tid == -1: 11 | return "merged thread" 12 | name = record.thread_name 13 | thread_id = hex(record.tid) 14 | return f"{thread_id} ({name})" if name else f"{thread_id}" 15 | -------------------------------------------------------------------------------- /src/memray/reporters/frame_tools.py: -------------------------------------------------------------------------------- 1 | """Tools for processing and filtering stack frames.""" 2 | import functools 3 | import re 4 | from typing import Tuple 5 | 6 | Symbol = str 7 | File = str 8 | Lineno = int 9 | StackFrame = Tuple[Symbol, File, Lineno] 10 | 11 | RE_CPYTHON_PATHS = re.compile(r"(Include|Objects|Modules|Python|cpython).*\.[c|h]$") 12 | 13 | SYMBOL_IGNORELIST = { 14 | "PyObject_Call", 15 | "call_function", 16 | "classmethoddescr_call", 17 | "cmpwrapper_call", 18 | "do_call_core", 19 | "fast_function", 20 | "function_call", 21 | "function_code_fastcall", 22 | "instance_call", 23 | "instancemethod_call", 24 | "instancemethod_call", 25 | "methoddescr_call", 26 | "proxy_call", 27 | "slot_tp_call", 28 | "trace_call_function", 29 | "type_call", 30 | "weakref_call", 31 | "wrap_call", 32 | "wrapper_call", 33 | "wrapperdescr_call", 34 | } 35 | 36 | 37 | @functools.lru_cache(maxsize=1000) 38 | def _is_cpython_internal_symbol(symbol: str, file: str) -> bool: 39 | if "PyEval_EvalFrameEx" in symbol or "_PyEval_EvalFrameDefault" in symbol: 40 | is_candidate = True 41 | elif symbol.startswith(("PyEval", "_Py")): 42 | is_candidate = True 43 | elif "vectorcall" in symbol.lower(): 44 | is_candidate = True 45 | elif symbol in SYMBOL_IGNORELIST: 46 | is_candidate = True 47 | elif "Objects/call.c" in file: 48 | is_candidate = True 49 | else: 50 | is_candidate = False 51 | 52 | if is_candidate: 53 | return re.search(RE_CPYTHON_PATHS, file) is not None 54 | return False 55 | 56 | 57 | def is_cpython_internal(frame: StackFrame) -> bool: 58 | symbol, file, _ = frame 59 | return _is_cpython_internal_symbol(symbol, file) 60 | 61 | 62 | def is_frame_interesting(frame: StackFrame) -> bool: 63 | function, file, _ = frame 64 | 65 | if file.endswith("runpy.py") or file == "": 66 | return False 67 | 68 | return not _is_cpython_internal_symbol(function, file) 69 | 70 | 71 | def is_frame_from_import_system(frame: StackFrame) -> bool: 72 | function, file, _ = frame 73 | if "frozen importlib" in file: 74 | return True 75 | if function in {"import_name", "import_from", "import_all_from"} and file.endswith( 76 | "ceval.c" 77 | ): 78 | return True 79 | return False 80 | -------------------------------------------------------------------------------- /src/memray/reporters/table.py: -------------------------------------------------------------------------------- 1 | import html 2 | from typing import Any 3 | from typing import Dict 4 | from typing import Iterable 5 | from typing import List 6 | from typing import TextIO 7 | 8 | from memray import AllocationRecord 9 | from memray import AllocatorType 10 | from memray import MemorySnapshot 11 | from memray import Metadata 12 | from memray.reporters.common import format_thread_name 13 | from memray.reporters.templates import render_report 14 | 15 | 16 | class TableReporter: 17 | def __init__( 18 | self, 19 | data: List[Dict[str, Any]], 20 | *, 21 | memory_records: Iterable[MemorySnapshot], 22 | ): 23 | super().__init__() 24 | self.data = data 25 | self.memory_records = memory_records 26 | 27 | @classmethod 28 | def from_snapshot( 29 | cls, 30 | 
allocations: Iterable[AllocationRecord], 31 | *, 32 | memory_records: Iterable[MemorySnapshot], 33 | native_traces: bool, 34 | **kwargs: Any, 35 | ) -> "TableReporter": 36 | result = [] 37 | for record in allocations: 38 | stack_trace = ( 39 | list(record.hybrid_stack_trace(max_stacks=1)) 40 | if native_traces 41 | else record.stack_trace(max_stacks=1) 42 | ) 43 | stack = "???" 44 | if stack_trace: 45 | function, file, line = stack_trace[0] 46 | stack = f"{function} at {file}:{line}" 47 | 48 | allocator = AllocatorType(record.allocator) 49 | result.append( 50 | { 51 | "tid": format_thread_name(record), 52 | "size": record.size, 53 | "allocator": allocator.name.lower(), 54 | "n_allocations": record.n_allocations, 55 | "stack_trace": html.escape(stack), 56 | } 57 | ) 58 | 59 | return cls(result, memory_records=memory_records) 60 | 61 | def render( 62 | self, 63 | outfile: TextIO, 64 | metadata: Metadata, 65 | show_memory_leaks: bool, 66 | merge_threads: bool, 67 | inverted: bool, 68 | ) -> None: 69 | if not merge_threads: 70 | raise NotImplementedError("TableReporter only supports merged threads.") 71 | if inverted: 72 | raise NotImplementedError( 73 | "TableReporter does not support inverted argument" 74 | ) 75 | html_code = render_report( 76 | kind="table", 77 | data=self.data, 78 | metadata=metadata, 79 | memory_records=self.memory_records, 80 | show_memory_leaks=show_memory_leaks, 81 | merge_threads=merge_threads, 82 | inverted=inverted, 83 | ) 84 | print(html_code, file=outfile) 85 | -------------------------------------------------------------------------------- /src/memray/reporters/templates/__init__.py: -------------------------------------------------------------------------------- 1 | """Templates to render reports in HTML.""" 2 | from functools import lru_cache 3 | from typing import Any 4 | from typing import Dict 5 | from typing import Iterable 6 | from typing import Union 7 | 8 | import jinja2 9 | from markupsafe import Markup 10 | 11 | from memray import MemorySnapshot 12 | from memray import Metadata 13 | 14 | 15 | @lru_cache(maxsize=1) 16 | def get_render_environment() -> jinja2.Environment: 17 | loader = jinja2.PackageLoader("memray.reporters") 18 | env = jinja2.Environment(loader=loader) 19 | 20 | def include_file(name: str) -> Markup: 21 | """Include a file from the templates directory without 22 | interpolating its contents""" 23 | source, *_ = loader.get_source(env, name) 24 | return Markup(source) 25 | 26 | env.globals["include_file"] = include_file 27 | env.policies["json.dumps_kwargs"] = {"sort_keys": True, "separators": (",", ":")} 28 | return env 29 | 30 | 31 | def get_report_title( 32 | *, kind: str, show_memory_leaks: bool, inverted: bool = False 33 | ) -> str: 34 | parts = [] 35 | if inverted: 36 | parts.append("inverted") 37 | parts.append(kind) 38 | parts.append("report") 39 | if show_memory_leaks: 40 | parts.append("(memory leaks)") 41 | return " ".join(parts) 42 | 43 | 44 | def render_report( 45 | *, 46 | kind: str, 47 | data: Union[Dict[str, Any], Iterable[Dict[str, Any]]], 48 | metadata: Metadata, 49 | memory_records: Iterable[MemorySnapshot], 50 | show_memory_leaks: bool, 51 | merge_threads: bool, 52 | inverted: bool, 53 | ) -> str: 54 | env = get_render_environment() 55 | template = env.get_template(kind + ".html") 56 | 57 | pretty_kind = kind.replace("_", " ") 58 | title = get_report_title( 59 | kind=pretty_kind, 60 | show_memory_leaks=show_memory_leaks, 61 | inverted=inverted, 62 | ) 63 | return template.render( 64 | kind=pretty_kind, 65 | 
title=title, 66 | data=data, 67 | metadata=metadata, 68 | memory_records=memory_records, 69 | show_memory_leaks=show_memory_leaks, 70 | merge_threads=merge_threads, 71 | inverted=inverted, 72 | ) 73 | -------------------------------------------------------------------------------- /src/memray/reporters/templates/assets/.gitattributes: -------------------------------------------------------------------------------- 1 | *.js -diff 2 | -------------------------------------------------------------------------------- /src/memray/reporters/templates/assets/flamegraph.css: -------------------------------------------------------------------------------- 1 | /* Blocks */ 2 | .d3-flame-graph rect { 3 | fill-opacity: 0.8; 4 | stroke: #1111; 5 | stroke-width: 1px; 6 | } 7 | .d3-flame-graph .frame:hover rect { 8 | stroke: #1118; 9 | stroke-width: 1px; 10 | cursor: pointer; 11 | } 12 | 13 | .d3-flame-graph-label { 14 | color: black; 15 | pointer-events: none; 16 | 17 | font-size: 13px; 18 | line-height: 1.5; 19 | 20 | text-align: left; 21 | text-overflow: ellipsis; 22 | overflow: hidden; 23 | white-space: nowrap; 24 | 25 | padding: 0; 26 | margin-left: 4px; 27 | margin-right: 4px; 28 | } 29 | 30 | /* Bacckground stuff */ 31 | .d3-flame-graph .fade { 32 | opacity: 0.6 !important; 33 | } 34 | 35 | /* Tooltip */ 36 | .d3-flame-graph-tip { 37 | z-index: 10; 38 | 39 | background: black; 40 | border: none; 41 | color: white; 42 | 43 | text-align: left; 44 | 45 | min-width: 250px; 46 | border-radius: 2px; 47 | padding: 2px 4px; 48 | } 49 | 50 | .tooltip-inner { 51 | max-width: 300px; 52 | } 53 | 54 | /* Loading animation */ 55 | 56 | #loading { 57 | position: absolute; 58 | top: 50%; 59 | left: 50%; 60 | transform: translate(-50%, -50%); 61 | text-align: center; 62 | } 63 | 64 | .loading-spinner { 65 | border: 4px solid #f3f3f3; 66 | border-top: 4px solid #3498db; 67 | border-radius: 50%; 68 | width: 50px; 69 | height: 50px; 70 | animation: spin 2s linear infinite; 71 | } 72 | 73 | @keyframes spin { 74 | 0% { 75 | transform: rotate(0deg); 76 | } 77 | 100% { 78 | transform: rotate(360deg); 79 | } 80 | } 81 | 82 | #overlay { 83 | position: fixed; 84 | top: 0; 85 | left: 0; 86 | width: 100%; 87 | height: 100%; 88 | background-color: rgba(0, 0, 0, 0.5); /* semi-transparent black */ 89 | z-index: 99; /* make sure it's on top of other elements */ 90 | } 91 | 92 | .flamegraph-icon { 93 | display: flex; 94 | align-items: center; 95 | justify-content: center; 96 | } 97 | 98 | .flipped svg { transform: scale(1,-1); } 99 | -------------------------------------------------------------------------------- /src/memray/reporters/templates/assets/table.css: -------------------------------------------------------------------------------- 1 | .tooltip-inner { 2 | max-width: 300px; 3 | } 4 | -------------------------------------------------------------------------------- /src/memray/reporters/templates/classic_base.html: -------------------------------------------------------------------------------- 1 | {# Extends base.html and adds zoomable memory chart without sliders. 
#} 2 | {% extends "base.html" %} 3 | 4 | {% block topbar_buttons %} 5 | {{ super() }} 6 | 7 | {% endblock %} 8 | 9 | {% block extra_nav %} 10 | 13 | {% endblock %} 14 | 15 | {% block extra_modal %} 16 | 17 | 35 | {% endblock %} 36 | -------------------------------------------------------------------------------- /src/memray/reporters/templates/table.html: -------------------------------------------------------------------------------- 1 | {% extends "classic_base.html" %} 2 | 3 | {% block topbar_buttons %} 4 | {{ super() }} 5 | {% endblock %} 6 | 7 | {% block content %} 8 |
9 | 10 |
11 |
12 | {% endblock %} 13 | 14 | {% block help %} 15 | {% if show_memory_leaks %} 16 |

17 | The table reporter provides a simple tabular representation of memory 18 | allocations, for memory that was leaked during the tracking period 19 | (i.e. allocated and not deallocated). 20 |

21 | 27 | {% else %} 28 |

29 | The table reporter provides a simple tabular representation of memory 30 | allocations in the target when the memory usage was at its peak. 31 |

32 | {% endif %} 33 |

34 | You can find more information in the documentation. 35 |

36 | {% endblock %} 37 | 38 | {% block styles %} 39 | {{ super() }} 40 | 41 | {% endblock %} 42 | 43 | {% block scripts %} 44 | {{ super() }} 45 | 46 | 47 | 50 | {% endblock %} 51 | -------------------------------------------------------------------------------- /src/memray/reporters/templates/temporal_flamegraph.html: -------------------------------------------------------------------------------- 1 | {% extends "flamegraph.html" %} 2 | 3 | {% block content %} 4 |
5 |
6 | How to use this plot 7 | 10 |
11 |
12 | You can move the plot slider to select different ranges for the flame 13 | graph. The flame graph shows the allocations that are created in the 14 | selected range that are not deallocated before the end of the range. 15 |
16 |
17 | 18 |
19 | 23 | 24 |
25 |
26 |
27 | {% endblock %} 28 | 29 | {% block slider_help %} 30 |

31 | Initially the report shows allocations made at any time and not freed before 32 | tracking was deactivated. By using the two sliders on the bottom line chart, 33 | you can select a different time range for analysis instead. The flame graph 34 | will be updated to reflect allocations made within your chosen time window 35 | and not freed within it. 36 |

37 | {% endblock %} 38 | 39 | {% block flamegraph_script %} 40 | 47 | {% endblock %} 48 | -------------------------------------------------------------------------------- /src/memray/reporters/tree.css: -------------------------------------------------------------------------------- 1 | Label { 2 | padding: 1 3; 3 | } 4 | 5 | #frame-detail-grid Label { 6 | color: $text; 7 | height: auto; 8 | width: 100%; 9 | background: $panel-lighten-1; 10 | } 11 | 12 | #frame-detail-grid { 13 | grid-size: 1 2; 14 | grid-gutter: 1 2; 15 | padding: 0 1; 16 | border: thick $background 80%; 17 | background: $surface; 18 | } 19 | 20 | #detailcol { 21 | width: 40%; 22 | max-width: 100; 23 | } 24 | 25 | TextArea { 26 | scrollbar-size-vertical: 0; 27 | border: none; 28 | padding: 0; 29 | } 30 | -------------------------------------------------------------------------------- /src/memray/reporters/tui.css: -------------------------------------------------------------------------------- 1 | TUI { 2 | layout: vertical; 3 | } 4 | 5 | #head { 6 | layout: horizontal; 7 | height: 1; 8 | } 9 | 10 | #head_time_display { 11 | width: 30; 12 | text-align: right; 13 | dock: right; 14 | } 15 | 16 | Header { 17 | height: 7; 18 | } 19 | 20 | .narrow Header { 21 | height: 11; 22 | } 23 | 24 | #header_container { 25 | layout: horizontal; 26 | } 27 | 28 | .narrow #header_container { 29 | layout: vertical; 30 | } 31 | 32 | #header_metadata { 33 | layout: vertical; 34 | width: 100%; 35 | height: 7; 36 | border: blank; 37 | border-title-align: right; 38 | border-title-color: $success-lighten-1; 39 | overflow: auto auto; 40 | } 41 | 42 | .narrow #header_metadata { 43 | border: none; 44 | height: 5; 45 | } 46 | 47 | #header_metadata_grid { 48 | layout: grid; 49 | height: 3; 50 | width: auto; 51 | grid-size: 2; 52 | grid-columns: auto auto; 53 | grid-gutter: 0 3; 54 | overflow: hidden hidden; 55 | } 56 | 57 | #header_metadata > #status_message { 58 | column-span: 2; 59 | width: auto; 60 | height: 1; 61 | } 62 | 63 | MemoryGraph { 64 | dock: right; 65 | border: round $success-lighten-1; 66 | box-sizing: content-box; 67 | height: 4; 68 | max-width: 50; 69 | } 70 | 71 | #memory_graph_container { 72 | dock: right; 73 | box-sizing: content-box; 74 | width: 30%; 75 | max-width: 52; 76 | } 77 | 78 | .narrow #memory_graph_container { 79 | dock: bottom; 80 | box-sizing: border-box; 81 | width: 100%; 82 | max-width: 100%; 83 | height: 6; 84 | } 85 | 86 | Footer { 87 | background: transparent; 88 | } 89 | 90 | Footer > .footer--key { 91 | background: rgb(30, 144, 255); 92 | } 93 | 94 | AllocationTable { 95 | height: 1fr; 96 | } 97 | 98 | .narrow AllocationTable { 99 | height: 100%; 100 | } 101 | 102 | AllocationTable > ScrollView { 103 | height: 100%; 104 | } 105 | -------------------------------------------------------------------------------- /src/vendor/libbacktrace/.gitignore: -------------------------------------------------------------------------------- 1 | *~ 2 | *.o 3 | *.lo 4 | *.a 5 | *.la 6 | -------------------------------------------------------------------------------- /src/vendor/libbacktrace/LICENSE: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2012-2016 Free Software Foundation, Inc. 
2 | 3 | # Redistribution and use in source and binary forms, with or without 4 | # modification, are permitted provided that the following conditions are 5 | # met: 6 | 7 | # (1) Redistributions of source code must retain the above copyright 8 | # notice, this list of conditions and the following disclaimer. 9 | 10 | # (2) Redistributions in binary form must reproduce the above copyright 11 | # notice, this list of conditions and the following disclaimer in 12 | # the documentation and/or other materials provided with the 13 | # distribution. 14 | 15 | # (3) The name of the author may not be used to 16 | # endorse or promote products derived from this software without 17 | # specific prior written permission. 18 | 19 | # THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR 20 | # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 21 | # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 22 | # DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, 23 | # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 24 | # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 25 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 26 | # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, 27 | # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING 28 | # IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 29 | # POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /src/vendor/libbacktrace/README.md: -------------------------------------------------------------------------------- 1 | # libbacktrace 2 | A C library that may be linked into a C/C++ program to produce symbolic backtraces 3 | 4 | Initially written by Ian Lance Taylor . 5 | 6 | This is version 1.0. 7 | It is likely that this will always be version 1.0. 8 | 9 | The libbacktrace library may be linked into a program or library and 10 | used to produce symbolic backtraces. 11 | Sample uses would be to print a detailed backtrace when an error 12 | occurs or to gather detailed profiling information. 13 | 14 | In general the functions provided by this library are async-signal-safe, 15 | meaning that they may be safely called from a signal handler. 16 | That said, on systems that use `dl_iterate_phdr`, such as GNU/Linux, 17 | the first call to a libbacktrace function will call `dl_iterate_phdr`, 18 | which is not in general async-signal-safe. Therefore, programs 19 | that call libbacktrace from a signal handler should ensure that they 20 | make an initial call from outside of a signal handler. 21 | Similar considerations apply when arranging to call libbacktrace 22 | from within malloc; `dl_iterate_phdr` can also call malloc, 23 | so make an initial call to a libbacktrace function outside of 24 | malloc before trying to call libbacktrace functions within malloc. 25 | 26 | The libbacktrace library is provided under a BSD license. 27 | See the source files for the exact license text. 28 | 29 | The public functions are declared and documented in the header file 30 | backtrace.h, which should be #include'd by a user of the library. 31 | 32 | Building libbacktrace will generate a file backtrace-supported.h, 33 | which a user of the library may use to determine whether backtraces 34 | will work. 35 | See the source file backtrace-supported.h.in for the macros that it 36 | defines.
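A minimal usage sketch (not part of the vendored sources): it shows how a program might consume the public API described above, pieced together from the prototypes visible later in this tree (backtrace_create_state in state.c, backtrace_full in nounwind.c, and the callback shapes used in unknown.c). The authoritative declarations live in backtrace.h, which is not reproduced in this dump, so the exact signatures should be checked there.

#include <stdint.h>
#include <stdio.h>
#include "backtrace.h"

/* Per-frame callback: returning 0 tells libbacktrace to keep unwinding. */
static int on_frame(void *data, uintptr_t pc, const char *filename,
                    int lineno, const char *function)
{
  (void) data;
  printf("0x%lx %s at %s:%d\n", (unsigned long) pc,
         function ? function : "???",
         filename ? filename : "???", lineno);
  return 0;
}

static void on_error(void *data, const char *msg, int errnum)
{
  (void) data;
  fprintf(stderr, "libbacktrace: %s (%d)\n", msg, errnum);
}

int main(int argc, char **argv)
{
  (void) argc;
  /* Make the first libbacktrace calls outside of any signal handler,
     per the note above about dl_iterate_phdr. */
  struct backtrace_state *state =
      backtrace_create_state(argv[0], /* threaded */ 1, on_error, NULL);
  if (state != NULL)
    backtrace_full(state, /* skip */ 0, on_frame, on_error, NULL);
  return 0;
}

Compiling would look something like `cc demo.c -lbacktrace` once the library has been built and installed; the exact link flags depend on how libbacktrace was configured.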
37 | 38 | As of July 2024, libbacktrace supports ELF, PE/COFF, Mach-O, and 39 | XCOFF executables with DWARF debugging information. 40 | In other words, it supports GNU/Linux, *BSD, macOS, Windows, and AIX. 41 | The library is written to make it straightforward to add support for 42 | other object file and debugging formats. 43 | 44 | The library relies on the C++ unwind API defined at 45 | https://itanium-cxx-abi.github.io/cxx-abi/abi-eh.html 46 | This API is provided by GCC and clang. 47 | -------------------------------------------------------------------------------- /src/vendor/libbacktrace/config/enable.m4: -------------------------------------------------------------------------------- 1 | dnl ---------------------------------------------------------------------- 2 | dnl This whole bit snagged from libstdc++-v3. 3 | 4 | dnl 5 | dnl GCC_ENABLE 6 | dnl (FEATURE, DEFAULT, HELP-ARG, HELP-STRING) 7 | dnl (FEATURE, DEFAULT, HELP-ARG, HELP-STRING, permit a|b|c) 8 | dnl (FEATURE, DEFAULT, HELP-ARG, HELP-STRING, SHELL-CODE-HANDLER) 9 | dnl 10 | dnl See docs/html/17_intro/configury.html#enable for documentation. 11 | dnl 12 | AC_DEFUN([GCC_ENABLE],[dnl 13 | m4_define([_g_switch],[--enable-$1])dnl 14 | m4_define([_g_help],[AC_HELP_STRING(_g_switch$3,[$4 @<:@default=$2@:>@])])dnl 15 | AC_ARG_ENABLE($1,_g_help, 16 | m4_bmatch([$5], 17 | [^permit ], 18 | [[ 19 | case "$enableval" in 20 | m4_bpatsubst([$5],[permit ])) ;; 21 | *) AC_MSG_ERROR(Unknown argument to enable/disable $1) ;; 22 | dnl Idea for future: generate a URL pointing to 23 | dnl "onlinedocs/configopts.html#whatever" 24 | esac 25 | ]], 26 | [^$], 27 | [[ 28 | case "$enableval" in 29 | yes|no) ;; 30 | *) AC_MSG_ERROR(Argument to enable/disable $1 must be yes or no) ;; 31 | esac 32 | ]], 33 | [[$5]]), 34 | [enable_]m4_bpatsubst([$1],-,_)[=][$2]) 35 | m4_undefine([_g_switch])dnl 36 | m4_undefine([_g_help])dnl 37 | ]) 38 | 39 | -------------------------------------------------------------------------------- /src/vendor/libbacktrace/config/lead-dot.m4: -------------------------------------------------------------------------------- 1 | # -*- Autoconf -*- 2 | # Copyright (C) 2003, 2009 Free Software Foundation, Inc. 3 | 4 | # This program is free software; you can redistribute it and/or modify 5 | # it under the terms of the GNU General Public License as published by 6 | # the Free Software Foundation; either version 3, or (at your option) 7 | # any later version. 8 | 9 | # This program is distributed in the hope that it will be useful, 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 12 | # GNU General Public License for more details. 13 | 14 | # You should have received a copy of the GNU General Public License 15 | # along with this program; see the file COPYING3. If not see 16 | # . 17 | 18 | # serial 1 19 | 20 | # Check whether the underlying file-system supports filenames 21 | # with a leading dot. For instance MS-DOS doesn't. 22 | AC_DEFUN([AM_SET_LEADING_DOT], 23 | [rm -rf .tst 2>/dev/null 24 | mkdir .tst 2>/dev/null 25 | if test -d .tst; then 26 | am__leading_dot=. 
27 | else 28 | am__leading_dot=_ 29 | fi 30 | rmdir .tst 2>/dev/null 31 | AC_SUBST([am__leading_dot])]) 32 | -------------------------------------------------------------------------------- /src/vendor/libbacktrace/config/ltversion.m4: -------------------------------------------------------------------------------- 1 | # ltversion.m4 -- version numbers -*- Autoconf -*- 2 | # 3 | # Copyright (C) 2004 Free Software Foundation, Inc. 4 | # Written by Scott James Remnant, 2004 5 | # 6 | # This file is free software; the Free Software Foundation gives 7 | # unlimited permission to copy and/or distribute it, with or without 8 | # modifications, as long as this notice is preserved. 9 | 10 | # Generated from ltversion.in. 11 | 12 | # serial 3134 ltversion.m4 13 | # This file is part of GNU Libtool 14 | 15 | m4_define([LT_PACKAGE_VERSION], [2.2.7a]) 16 | m4_define([LT_PACKAGE_REVISION], [1.3134]) 17 | 18 | AC_DEFUN([LTVERSION_VERSION], 19 | [macro_version='2.2.7a' 20 | macro_revision='1.3134' 21 | _LT_DECL(, macro_version, 0, [Which release of libtool.m4 was used?]) 22 | _LT_DECL(, macro_revision, 0) 23 | ]) 24 | -------------------------------------------------------------------------------- /src/vendor/libbacktrace/config/multi.m4: -------------------------------------------------------------------------------- 1 | ## -*- Autoconf -*- 2 | # Copyright (C) 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2008 3 | # Free Software Foundation, Inc. 4 | # 5 | # This file is free software; the Free Software Foundation 6 | # gives unlimited permission to copy and/or distribute it, 7 | # with or without modifications, as long as this notice is preserved. 8 | 9 | # serial 6 10 | 11 | # AM_ENABLE_MULTILIB([MAKEFILE], [REL-TO-TOP-SRCDIR]) 12 | # --------------------------------------------------- 13 | # Add --enable-multilib to configure. 14 | AC_DEFUN([AM_ENABLE_MULTILIB], 15 | [# Default to --enable-multilib 16 | AC_ARG_ENABLE(multilib, 17 | [ --enable-multilib build many library versions (default)], 18 | [case "$enableval" in 19 | yes) multilib=yes ;; 20 | no) multilib=no ;; 21 | *) AC_MSG_ERROR([bad value $enableval for multilib option]) ;; 22 | esac], 23 | [multilib=yes]) 24 | 25 | # We may get other options which we leave undocumented: 26 | # --with-target-subdir, --with-multisrctop, --with-multisubdir 27 | # See config-ml.in if you want the gory details. 28 | 29 | if test "$srcdir" = "."; then 30 | if test "$with_target_subdir" != "."; then 31 | multi_basedir="$srcdir/$with_multisrctop../$2" 32 | else 33 | multi_basedir="$srcdir/$with_multisrctop$2" 34 | fi 35 | else 36 | multi_basedir="$srcdir/$2" 37 | fi 38 | AC_SUBST(multi_basedir) 39 | 40 | # Even if the default multilib is not a cross compilation, 41 | # it may be that some of the other multilibs are. 42 | if test $cross_compiling = no && test $multilib = yes \ 43 | && test "x${with_multisubdir}" != x ; then 44 | cross_compiling=maybe 45 | fi 46 | 47 | AC_OUTPUT_COMMANDS([ 48 | # Only add multilib support code if we just rebuilt the top-level 49 | # Makefile. 50 | case " $CONFIG_FILES " in 51 | *" ]m4_default([$1],Makefile)[ "*) 52 | ac_file=]m4_default([$1],Makefile)[ . 
${multi_basedir}/config-ml.in 53 | ;; 54 | esac], 55 | [ 56 | srcdir="$srcdir" 57 | host="$host" 58 | target="$target" 59 | with_multisubdir="$with_multisubdir" 60 | with_multisrctop="$with_multisrctop" 61 | with_target_subdir="$with_target_subdir" 62 | ac_configure_args="${multilib_arg} ${ac_configure_args}" 63 | multi_basedir="$multi_basedir" 64 | CONFIG_SHELL=${CONFIG_SHELL-/bin/sh} 65 | CC="$CC" 66 | CXX="$CXX" 67 | GFORTRAN="$GFORTRAN" 68 | GDC="$GDC"])])dnl 69 | -------------------------------------------------------------------------------- /src/vendor/libbacktrace/config/unwind_ipinfo.m4: -------------------------------------------------------------------------------- 1 | dnl 2 | dnl Check whether _Unwind_GetIPInfo is available without doing a link 3 | dnl test so we can use this with libstdc++-v3 and libjava. Need to 4 | dnl use $target to set defaults because automatic checking is not possible 5 | dnl without a link test (and maybe even with a link test). 6 | dnl 7 | 8 | AC_DEFUN([GCC_CHECK_UNWIND_GETIPINFO], [ 9 | AC_ARG_WITH(system-libunwind, 10 | [ --with-system-libunwind use installed libunwind]) 11 | # If system-libunwind was not specifically set, pick a default setting. 12 | if test x$with_system_libunwind = x; then 13 | case ${target} in 14 | ia64-*-hpux*) with_system_libunwind=yes ;; 15 | *) with_system_libunwind=no ;; 16 | esac 17 | fi 18 | # Based on system-libunwind and target, do we have ipinfo? 19 | if test x$with_system_libunwind = xyes; then 20 | case ${target} in 21 | ia64-*-*) have_unwind_getipinfo=no ;; 22 | *) have_unwind_getipinfo=yes ;; 23 | esac 24 | else 25 | # Darwin before version 9 does not have _Unwind_GetIPInfo. 26 | changequote(,) 27 | case ${target} in 28 | *-*-darwin[3-8]|*-*-darwin[3-8].*) have_unwind_getipinfo=no ;; 29 | *) have_unwind_getipinfo=yes ;; 30 | esac 31 | changequote([,]) 32 | fi 33 | 34 | if test x$have_unwind_getipinfo = xyes; then 35 | AC_DEFINE(HAVE_GETIPINFO, 1, [Define if _Unwind_GetIPInfo is available.]) 36 | fi 37 | ]) 38 | -------------------------------------------------------------------------------- /src/vendor/libbacktrace/edtest2.c: -------------------------------------------------------------------------------- 1 | /* edtest2.c -- Test for libbacktrace storage allocation stress handling (p2) 2 | Copyright (C) 2017-2024 Free Software Foundation, Inc. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are 6 | met: 7 | 8 | (1) Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | 11 | (2) Redistributions in binary form must reproduce the above copyright 12 | notice, this list of conditions and the following disclaimer in 13 | the documentation and/or other materials provided with the 14 | distribution. 15 | 16 | (3) The name of the author may not be used to 17 | endorse or promote products derived from this software without 18 | specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR 21 | IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 22 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, 24 | INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 25 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 27 | HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, 28 | STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING 29 | IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 30 | POSSIBILITY OF SUCH DAMAGE. */ 31 | 32 | /* This file intentionally written without any #include's 33 | */ 34 | 35 | extern int f3(int, int); 36 | extern int f2(int); 37 | 38 | int f2(int x) 39 | { 40 | /* Returning a value here and elsewhere avoids a tailcall which 41 | would mess up the backtrace. */ 42 | return f3(x, __LINE__) + 3; 43 | } 44 | -------------------------------------------------------------------------------- /src/vendor/libbacktrace/filenames.h: -------------------------------------------------------------------------------- 1 | /* btest.c -- Filename header for libbacktrace library 2 | Copyright (C) 2012-2018 Free Software Foundation, Inc. 3 | Written by Ian Lance Taylor, Google. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are 7 | met: 8 | 9 | (1) Redistributions of source code must retain the above copyright 10 | notice, this list of conditions and the following disclaimer. 11 | 12 | (2) Redistributions in binary form must reproduce the above copyright 13 | notice, this list of conditions and the following disclaimer in 14 | the documentation and/or other materials provided with the 15 | distribution. 16 | 17 | (3) The name of the author may not be used to 18 | endorse or promote products derived from this software without 19 | specific prior written permission. 20 | 21 | THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR 22 | IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 23 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 24 | DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, 25 | INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 26 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 27 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 28 | HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, 29 | STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING 30 | IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 31 | POSSIBILITY OF SUCH DAMAGE. 
*/ 32 | 33 | #ifndef GCC_VERSION 34 | # define GCC_VERSION (__GNUC__ * 1000 + __GNUC_MINOR__) 35 | #endif 36 | 37 | #if (GCC_VERSION < 2007) 38 | # define __attribute__(x) 39 | #endif 40 | 41 | #ifndef ATTRIBUTE_UNUSED 42 | # define ATTRIBUTE_UNUSED __attribute__ ((__unused__)) 43 | #endif 44 | 45 | #if defined(__MSDOS__) || defined(_WIN32) || defined(__OS2__) || defined (__CYGWIN__) 46 | # define IS_DIR_SEPARATOR(c) ((c) == '/' || (c) == '\\') 47 | # define HAS_DRIVE_SPEC(f) ((f)[0] != '\0' && (f)[1] == ':') 48 | # define IS_ABSOLUTE_PATH(f) (IS_DIR_SEPARATOR((f)[0]) || HAS_DRIVE_SPEC(f)) 49 | #else 50 | # define IS_DIR_SEPARATOR(c) ((c) == '/') 51 | # define IS_ABSOLUTE_PATH(f) (IS_DIR_SEPARATOR((f)[0])) 52 | #endif 53 | -------------------------------------------------------------------------------- /src/vendor/libbacktrace/filetype.awk: -------------------------------------------------------------------------------- 1 | # An awk script to determine the type of a file. 2 | /^\177ELF\001/ { if (NR == 1) { print "elf32"; exit } } 3 | /^\177ELF\002/ { if (NR == 1) { print "elf64"; exit } } 4 | /^\114\001/ { if (NR == 1) { print "pecoff"; exit } } 5 | /^\144\206/ { if (NR == 1) { print "pecoff"; exit } } 6 | /^\001\337/ { if (NR == 1) { print "xcoff32"; exit } } 7 | /^\001\367/ { if (NR == 1) { print "xcoff64"; exit } } 8 | /^\376\355\372\316/ { if (NR == 1) { print "macho"; exit } } 9 | /^\316\372\355\376/ { if (NR == 1) { print "macho"; exit } } 10 | /^\376\355\372\317/ { if (NR == 1) { print "macho"; exit } } 11 | /^\317\372\355\376/ { if (NR == 1) { print "macho"; exit } } 12 | /^\312\376\272\276/ { if (NR == 1) { print "macho"; exit } } 13 | /^\276\272\376\312/ { if (NR == 1) { print "macho"; exit } } 14 | -------------------------------------------------------------------------------- /src/vendor/libbacktrace/install-debuginfo-for-buildid.sh.in: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # install-debug-info-for-buildid.sh -- Helper script for libbacktrace library 4 | # testing. 5 | # Copyright (C) 2019-2024 Free Software Foundation, Inc. 6 | 7 | # Redistribution and use in source and binary forms, with or without 8 | # modification, are permitted provided that the following conditions are 9 | # met: 10 | 11 | # (1) Redistributions of source code must retain the above copyright 12 | # notice, this list of conditions and the following disclaimer. 13 | 14 | # (2) Redistributions in binary form must reproduce the above copyright 15 | # notice, this list of conditions and the following disclaimer in 16 | # the documentation and/or other materials provided with the 17 | # distribution. 18 | 19 | # (3) The name of the author may not be used to 20 | # endorse or promote products derived from this software without 21 | # specific prior written permission. 22 | 23 | # THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR 24 | # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 25 | # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 26 | # DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, 27 | # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 28 | # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 29 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 30 | # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, 31 | # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING 32 | # IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 33 | # POSSIBILITY OF SUCH DAMAGE. 34 | 35 | # Extract build-id from src, and copy debug info of src to 36 | # $build_id_dir/aa/bb...zz.debug. 37 | 38 | set -e 39 | 40 | sed=@SED@ 41 | awk=@AWK@ 42 | grep=@GREP@ 43 | objcopy=@OBJCOPY@ 44 | readelf=@READELF@ 45 | mkdir_p="@MKDIR_P@" 46 | 47 | build_id_dir="$1" 48 | src="$2" 49 | 50 | buildid=$(LANG=C $readelf -n $src \ 51 | | $grep "Build ID" \ 52 | | $awk '{print $3}') 53 | 54 | prefix=$(echo $buildid \ 55 | | $sed 's/^\(.\{2\}\).*/\1/') 56 | 57 | remainder=$(echo $buildid \ 58 | | $sed 's/^.\{2\}//') 59 | 60 | dir=$build_id_dir/$prefix 61 | dst=$dir/$remainder.debug 62 | 63 | $mkdir_p $dir 64 | 65 | $objcopy --only-keep-debug $src $dst 66 | -------------------------------------------------------------------------------- /src/vendor/libbacktrace/nounwind.c: -------------------------------------------------------------------------------- 1 | /* backtrace.c -- Entry point for stack backtrace library. 2 | Copyright (C) 2012-2024 Free Software Foundation, Inc. 3 | Written by Ian Lance Taylor, Google. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are 7 | met: 8 | 9 | (1) Redistributions of source code must retain the above copyright 10 | notice, this list of conditions and the following disclaimer. 11 | 12 | (2) Redistributions in binary form must reproduce the above copyright 13 | notice, this list of conditions and the following disclaimer in 14 | the documentation and/or other materials provided with the 15 | distribution. 16 | 17 | (3) The name of the author may not be used to 18 | endorse or promote products derived from this software without 19 | specific prior written permission. 20 | 21 | THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR 22 | IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 23 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 24 | DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, 25 | INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 26 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 27 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 28 | HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, 29 | STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING 30 | IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 31 | POSSIBILITY OF SUCH DAMAGE. */ 32 | 33 | #include "config.h" 34 | 35 | #include 36 | 37 | #include "backtrace.h" 38 | 39 | #include "internal.h" 40 | 41 | /* This source file is compiled if the unwind library is not 42 | available. 
*/ 43 | 44 | int 45 | backtrace_full (struct backtrace_state *state ATTRIBUTE_UNUSED, 46 | int skip ATTRIBUTE_UNUSED, 47 | backtrace_full_callback callback ATTRIBUTE_UNUSED, 48 | backtrace_error_callback error_callback, void *data) 49 | { 50 | error_callback (data, 51 | "no stack trace because unwind library not available", 52 | 0); 53 | return 0; 54 | } 55 | 56 | int 57 | backtrace_simple (struct backtrace_state *state ATTRIBUTE_UNUSED, 58 | int skip ATTRIBUTE_UNUSED, 59 | backtrace_simple_callback callback ATTRIBUTE_UNUSED, 60 | backtrace_error_callback error_callback, void *data) 61 | { 62 | error_callback (data, 63 | "no stack trace because unwind library not available", 64 | 0); 65 | return 0; 66 | } 67 | -------------------------------------------------------------------------------- /src/vendor/libbacktrace/state.c: -------------------------------------------------------------------------------- 1 | /* state.c -- Create the backtrace state. 2 | Copyright (C) 2012-2024 Free Software Foundation, Inc. 3 | Written by Ian Lance Taylor, Google. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are 7 | met: 8 | 9 | (1) Redistributions of source code must retain the above copyright 10 | notice, this list of conditions and the following disclaimer. 11 | 12 | (2) Redistributions in binary form must reproduce the above copyright 13 | notice, this list of conditions and the following disclaimer in 14 | the documentation and/or other materials provided with the 15 | distribution. 16 | 17 | (3) The name of the author may not be used to 18 | endorse or promote products derived from this software without 19 | specific prior written permission. 20 | 21 | THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR 22 | IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 23 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 24 | DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, 25 | INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 26 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 27 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 28 | HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, 29 | STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING 30 | IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 31 | POSSIBILITY OF SUCH DAMAGE. */ 32 | 33 | #include "config.h" 34 | 35 | #include 36 | #include 37 | 38 | #include "backtrace.h" 39 | #include "backtrace-supported.h" 40 | #include "internal.h" 41 | 42 | /* Create the backtrace state. This will then be passed to all the 43 | other routines. 
*/ 44 | 45 | struct backtrace_state * 46 | backtrace_create_state (const char *filename, int threaded, 47 | backtrace_error_callback error_callback, 48 | void *data) 49 | { 50 | struct backtrace_state init_state; 51 | struct backtrace_state *state; 52 | 53 | #ifndef HAVE_SYNC_FUNCTIONS 54 | if (threaded) 55 | { 56 | error_callback (data, "backtrace library does not support threads", 0); 57 | return NULL; 58 | } 59 | #endif 60 | 61 | memset (&init_state, 0, sizeof init_state); 62 | init_state.filename = filename; 63 | init_state.threaded = threaded; 64 | 65 | state = ((struct backtrace_state *) 66 | backtrace_alloc (&init_state, sizeof *state, error_callback, data)); 67 | if (state == NULL) 68 | return NULL; 69 | *state = init_state; 70 | 71 | return state; 72 | } 73 | -------------------------------------------------------------------------------- /src/vendor/libbacktrace/test_format.c: -------------------------------------------------------------------------------- 1 | /* test_format.c -- Test for libbacktrace library 2 | Copyright (C) 2018-2024 Free Software Foundation, Inc. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are 6 | met: 7 | 8 | (1) Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | 11 | (2) Redistributions in binary form must reproduce the above copyright 12 | notice, this list of conditions and the following disclaimer in 13 | the documentation and/or other materials provided with the 14 | distribution. 15 | 16 | (3) The name of the author may not be used to 17 | endorse or promote products derived from this software without 18 | specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR 21 | IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 22 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, 24 | INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 25 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 27 | HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, 28 | STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING 29 | IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 30 | POSSIBILITY OF SUCH DAMAGE. */ 31 | 32 | /* This program tests the externally visible interfaces of the 33 | libbacktrace library. */ 34 | 35 | #include 36 | #include 37 | #include 38 | #include 39 | #include 40 | 41 | #include "filenames.h" 42 | 43 | #include "backtrace.h" 44 | #include "backtrace-supported.h" 45 | 46 | #include "testlib.h" 47 | 48 | int 49 | main (int argc ATTRIBUTE_UNUSED, char **argv) 50 | { 51 | state = backtrace_create_state (argv[0], BACKTRACE_SUPPORTS_THREADS, 52 | error_callback_create, NULL); 53 | 54 | exit (failures ? EXIT_FAILURE : EXIT_SUCCESS); 55 | } 56 | -------------------------------------------------------------------------------- /src/vendor/libbacktrace/unknown.c: -------------------------------------------------------------------------------- 1 | /* unknown.c -- used when backtrace configury does not know file format. 2 | Copyright (C) 2012-2024 Free Software Foundation, Inc. 3 | Written by Ian Lance Taylor, Google. 
4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are 7 | met: 8 | 9 | (1) Redistributions of source code must retain the above copyright 10 | notice, this list of conditions and the following disclaimer. 11 | 12 | (2) Redistributions in binary form must reproduce the above copyright 13 | notice, this list of conditions and the following disclaimer in 14 | the documentation and/or other materials provided with the 15 | distribution. 16 | 17 | (3) The name of the author may not be used to 18 | endorse or promote products derived from this software without 19 | specific prior written permission. 20 | 21 | THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR 22 | IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 23 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 24 | DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, 25 | INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 26 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 27 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 28 | HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, 29 | STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING 30 | IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 31 | POSSIBILITY OF SUCH DAMAGE. */ 32 | 33 | #include "config.h" 34 | 35 | #include 36 | 37 | #include "backtrace.h" 38 | #include "internal.h" 39 | 40 | /* A trivial routine that always fails to find fileline data. */ 41 | 42 | static int 43 | unknown_fileline (struct backtrace_state *state ATTRIBUTE_UNUSED, 44 | uintptr_t pc, backtrace_full_callback callback, 45 | backtrace_error_callback error_callback ATTRIBUTE_UNUSED, 46 | void *data) 47 | 48 | { 49 | return callback (data, pc, NULL, 0, NULL); 50 | } 51 | 52 | /* Initialize the backtrace data when we don't know how to read the 53 | debug info. */ 54 | 55 | int 56 | backtrace_initialize (struct backtrace_state *state ATTRIBUTE_UNUSED, 57 | const char *filename ATTRIBUTE_UNUSED, 58 | int descriptor ATTRIBUTE_UNUSED, 59 | backtrace_error_callback error_callback ATTRIBUTE_UNUSED, 60 | void *data ATTRIBUTE_UNUSED, fileline *fileline_fn) 61 | { 62 | state->fileline_data = NULL; 63 | *fileline_fn = unknown_fileline; 64 | return 1; 65 | } 66 | -------------------------------------------------------------------------------- /src/vendor/regenerate_libbacktrace.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -Eeuo pipefail 3 | 4 | if [[ $# -ne 0 ]] && [[ $# -ne 1 ]]; then 5 | echo "Usage: $0 [new-commit]" 6 | exit 1 7 | fi 8 | 9 | old_snapshot=7e2b7da3d6568d2e4e78658f22e701746a48d7e1 10 | new_snapshot=${1:-} 11 | 12 | echo ">>> Cloning libbacktrace" 13 | rm -rf libbacktrace 14 | git clone https://github.com/ianlancetaylor/libbacktrace.git libbacktrace 15 | 16 | echo "Applying patches" 17 | cd libbacktrace 18 | git checkout "$old_snapshot" 19 | git am ../libbacktrace-patches/* 20 | 21 | if [[ -n "$new_snapshot" ]]; then 22 | echo "Rebasing on $new_snapshot" 23 | if git rebase "$new_snapshot"; then 24 | echo "Rebased successfully. Updating patches." 
25 | (cd ../libbacktrace-patches && git rm -f 0*) 26 | git format-patch "$new_snapshot" --no-numbered --output-directory=../libbacktrace-patches 27 | (cd ../libbacktrace-patches && git add 0*) 28 | else 29 | echo "Failed to apply patches. You must finish rebasing manually." 30 | echo "When you are satisfied, update the patches by running:" 31 | echo " git format-patch $new_snapshot --no-numbered --output-directory=../libbacktrace-patches" 32 | echo "Be sure to remove the old patches first if the file names will change." 33 | exit 1 34 | fi 35 | fi 36 | 37 | rm -rf .git 38 | 39 | echo "Regenerated vendored libbacktrace" 40 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/tests/__init__.py -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import socket 2 | import sys 3 | 4 | import pytest 5 | from packaging import version 6 | 7 | SNAPSHOT_MINIMUM_VERSIONS = { 8 | "textual": "3.1.0", 9 | "pytest-textual-snapshot": "1.1.0", 10 | } 11 | 12 | 13 | @pytest.fixture 14 | def free_port(): 15 | s = socket.socket() 16 | s.bind(("", 0)) 17 | port_number = s.getsockname()[1] 18 | s.close() 19 | return port_number 20 | 21 | 22 | def _snapshot_skip_reason(): 23 | if sys.version_info < (3, 8): 24 | # Every version available for 3.7 is too old 25 | return f"snapshot tests require textual>={SNAPSHOT_MINIMUM_VERSIONS['textual']}" 26 | 27 | from importlib import metadata # Added in 3.8 28 | 29 | for lib, min_ver in SNAPSHOT_MINIMUM_VERSIONS.items(): 30 | try: 31 | ver = version.parse(metadata.version(lib)) 32 | except ImportError: 33 | return f"snapshot tests require {lib} but it is not installed" 34 | 35 | if ver < version.parse(min_ver): 36 | return f"snapshot tests require {lib}>={min_ver} but {ver} is installed" 37 | 38 | return None 39 | 40 | 41 | def pytest_configure(config): 42 | if config.option.update_snapshots: 43 | from importlib import metadata # Added in 3.8 44 | 45 | for lib, min_ver in SNAPSHOT_MINIMUM_VERSIONS.items(): 46 | ver = version.parse(metadata.version(lib)) 47 | if ver != version.parse(min_ver): 48 | pytest.exit( 49 | f"snapshots must be generated with {lib}=={min_ver}" 50 | f" or SNAPSHOT_MINIMUM_VERSIONS must be updated to {ver}" 51 | f" in {__file__}" 52 | ) 53 | return 54 | 55 | reason = _snapshot_skip_reason() 56 | if reason: 57 | config.issue_config_time_warning(UserWarning(reason), stacklevel=2) 58 | config.option.warn_unused_snapshots = True 59 | 60 | 61 | def pytest_collection_modifyitems(config, items): 62 | reason = _snapshot_skip_reason() 63 | if reason: 64 | for item in items: 65 | if "snap_compare" in item.fixturenames: 66 | item.add_marker(pytest.mark.skip(reason=reason)) 67 | -------------------------------------------------------------------------------- /tests/integration/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/tests/integration/__init__.py -------------------------------------------------------------------------------- /tests/integration/misbehaving_extension/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/tests/integration/misbehaving_extension/__init__.py -------------------------------------------------------------------------------- /tests/integration/misbehaving_extension/setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sysconfig 3 | from distutils.core import Extension 4 | from distutils.core import setup 5 | 6 | ROOT = os.path.realpath(os.path.dirname(__file__)) 7 | LDSHARED = os.environ.get("LDSHARED", sysconfig.get_config_var("LDSHARED")) 8 | if LDSHARED: 9 | LDSHARED = LDSHARED.replace("--strip-all", "-g") 10 | os.environ["LDSHARED"] = LDSHARED 11 | 12 | setup( 13 | name="misbehaving", 14 | version="0.0", 15 | ext_modules=[ 16 | Extension( 17 | "misbehaving", 18 | language="c++", 19 | sources=[os.path.join(ROOT, "misbehaving.cpp")], 20 | ), 21 | ], 22 | zip_safe=False, 23 | ) 24 | -------------------------------------------------------------------------------- /tests/integration/multithreaded_extension/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/tests/integration/multithreaded_extension/__init__.py -------------------------------------------------------------------------------- /tests/integration/multithreaded_extension/main.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | import testext 4 | 5 | 6 | def foo(): 7 | fifo = sys.argv[1] 8 | with open(sys.argv[1], "w") as fifo: 9 | fifo.write("ready") 10 | testext.sleep10() 11 | 12 | 13 | foo() 14 | -------------------------------------------------------------------------------- /tests/integration/multithreaded_extension/setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sysconfig 3 | from distutils.core import Extension 4 | from distutils.core import setup 5 | 6 | ROOT = os.path.realpath(os.path.dirname(__file__)) 7 | LDSHARED = os.environ.get("LDSHARED", sysconfig.get_config_var("LDSHARED")) 8 | if LDSHARED: 9 | LDSHARED = LDSHARED.replace("--strip-all", "-g") 10 | os.environ["LDSHARED"] = LDSHARED 11 | 12 | setup( 13 | name="testext", 14 | version="0.0", 15 | ext_modules=[ 16 | Extension( 17 | "testext", 18 | language="c++", 19 | sources=[os.path.join(ROOT, "testext.cpp")], 20 | extra_compile_args=["-O0", "-g3"], 21 | ), 22 | ], 23 | zip_safe=False, 24 | ) 25 | -------------------------------------------------------------------------------- /tests/integration/native_extension/main.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | import testext 4 | 5 | 6 | def foo(): 7 | fifo = sys.argv[1] 8 | with open(sys.argv[1], "w") as fifo: 9 | fifo.write("ready") 10 | testext.sleep10() 11 | 12 | 13 | foo() 14 | -------------------------------------------------------------------------------- /tests/integration/native_extension/setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sysconfig 3 | from distutils.core import Extension 4 | from distutils.core import setup 5 | 6 | ROOT = os.path.realpath(os.path.dirname(__file__)) 7 | LDSHARED = os.environ.get("LDSHARED", sysconfig.get_config_var("LDSHARED")) 8 | if LDSHARED: 9 | LDSHARED = LDSHARED.replace("--strip-all", "-g") 10 | os.environ["LDSHARED"] = LDSHARED 
11 | 12 | setup( 13 | name="native_ext", 14 | version="0.0", 15 | ext_modules=[ 16 | Extension( 17 | "native_ext", 18 | language="c", 19 | sources=[os.path.join(ROOT, "native_ext.c")], 20 | extra_compile_args=["-O0", "-g3"], 21 | ), 22 | ], 23 | zip_safe=False, 24 | ) 25 | -------------------------------------------------------------------------------- /tests/integration/rpath_extension/ext.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | 4 | 5 | static PyObject *hello_world(PyObject *self, PyObject *args) { 6 | // Load the shared library 7 | void *lib_handle = dlopen("sharedlib.so", RTLD_LAZY); 8 | 9 | if (!lib_handle) { 10 | PyErr_SetString(PyExc_RuntimeError, dlerror()); 11 | return NULL; 12 | } 13 | 14 | // Get the function pointer 15 | void (*my_shared_function)() = dlsym(lib_handle, "my_shared_function"); 16 | if (!my_shared_function) { 17 | PyErr_SetString(PyExc_RuntimeError, dlerror()); 18 | dlclose(lib_handle); 19 | return NULL; 20 | } 21 | 22 | // Call the function 23 | my_shared_function(); 24 | 25 | // Close the shared library 26 | dlclose(lib_handle); 27 | 28 | Py_RETURN_NONE; 29 | } 30 | 31 | static PyMethodDef methods[] = { 32 | {"hello_world", hello_world, METH_NOARGS, "Print Hello, World!"}, 33 | {NULL, NULL, 0, NULL} 34 | }; 35 | 36 | static struct PyModuleDef module = { 37 | PyModuleDef_HEAD_INIT, 38 | "ext", 39 | NULL, 40 | -1, 41 | methods 42 | }; 43 | 44 | PyMODINIT_FUNC PyInit_ext(void) { 45 | return PyModule_Create(&module); 46 | } 47 | -------------------------------------------------------------------------------- /tests/integration/rpath_extension/setup.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | 3 | from setuptools import Extension 4 | from setuptools import setup 5 | 6 | # Compile the shared library before building the extension 7 | subprocess.run( 8 | ["gcc", "-shared", "-o", "sharedlibs/sharedlib.so", "sharedlibs/sharedlib.c"] 9 | ) 10 | 11 | 12 | setup( 13 | name="ext", 14 | version="1.0", 15 | ext_modules=[ 16 | Extension( 17 | "ext", 18 | sources=["ext.c"], 19 | extra_link_args=["-Wl,-rpath,$ORIGIN/sharedlibs"], 20 | ) 21 | ], 22 | ) 23 | -------------------------------------------------------------------------------- /tests/integration/rpath_extension/sharedlibs/sharedlib.c: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | void my_shared_function() { 4 | printf("This is a function from your_shared_lib!\n"); 5 | } 6 | -------------------------------------------------------------------------------- /tests/unit/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bloomberg/memray/cca8143814da33a807e192577dbe5bf2543f891d/tests/unit/__init__.py -------------------------------------------------------------------------------- /tests/unit/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | @pytest.fixture(autouse=True) 5 | def use_80_columns(monkeypatch): 6 | """Override the COLUMNS environment variable to 80. 7 | 8 | This matches the assumed terminal width that is hardcoded in the tests. 
/tests/unit/conftest.py:
--------------------------------------------------------------------------------
import pytest


@pytest.fixture(autouse=True)
def use_80_columns(monkeypatch):
    """Override the COLUMNS environment variable to 80.

    This matches the assumed terminal width that is hardcoded in the tests.
    """
    monkeypatch.setenv("COLUMNS", "80")
--------------------------------------------------------------------------------
/tests/unit/test_attach.py:
--------------------------------------------------------------------------------
from unittest.mock import patch

import pytest

from memray.commands import main


@patch("memray.commands.attach.debugger_available")
class TestAttachSubCommand:
    def test_memray_attach_aggregated_without_output_file(
        self, is_debugger_available_mock, capsys
    ):
        # GIVEN
        is_debugger_available_mock.return_value = True

        # WHEN
        with pytest.raises(SystemExit):
            main(["attach", "--aggregate", "1234"])

        captured = capsys.readouterr()
        print("Error", captured.err)
        assert "Can't use aggregated mode without an output file." in captured.err
--------------------------------------------------------------------------------
/tests/unit/test_templates.py:
--------------------------------------------------------------------------------
import pytest

from memray.reporters.templates import get_report_title


@pytest.mark.parametrize(
    ["kind", "show_memory_leaks", "inverted", "expected"],
    [
        ("flamegraph", False, False, "flamegraph report"),
        ("flamegraph", True, False, "flamegraph report (memory leaks)"),
        ("table", False, False, "table report"),
        ("table", True, False, "table report (memory leaks)"),
        ("flamegraph", False, True, "inverted flamegraph report"),
        ("flamegraph", True, True, "inverted flamegraph report (memory leaks)"),
    ],
)
def test_title_for_regular_report(kind, show_memory_leaks, inverted, expected):
    assert (
        get_report_title(
            kind=kind, show_memory_leaks=show_memory_leaks, inverted=inverted
        )
        == expected
    )
--------------------------------------------------------------------------------
/tests/unit/test_tracker.py:
--------------------------------------------------------------------------------
from pathlib import Path

import pytest

from memray import Tracker


def test_two_trackers_cannot_be_active_at_the_same_time(tmpdir):
    # GIVEN
    output = Path(tmpdir) / "test.bin"
    output2 = Path(tmpdir) / "test2.bin"

    # WHEN
    with Tracker(output):
        # THEN
        with pytest.raises(RuntimeError):
            with Tracker(output2):
                pass


def test_the_same_tracker_cannot_be_activated_twice(tmpdir):
    # GIVEN
    output = Path(tmpdir) / "test.bin"

    # WHEN
    tracker = Tracker(output)
    with tracker:
        # Remove the file so we are not stopped by the file existence check
        output.unlink()
    # THEN
    with pytest.raises(RuntimeError):
        with tracker:
            pass
--------------------------------------------------------------------------------
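The two tests in tests/unit/test_tracker.py above pin down the activation rules for memray.Tracker: only one tracker may be active in a process at a time, and a given Tracker instance cannot be activated a second time. A minimal sketch of the supported pattern is to profile phases sequentially, giving each phase a fresh Tracker and a fresh output file (file names and workloads below are illustrative, not taken from the repository):

from pathlib import Path

from memray import Tracker


def phase_one():
    return [bytes(4096) for _ in range(1000)]  # placeholder workload


def phase_two():
    return sum(range(1_000_000))  # placeholder workload


with Tracker(Path("phase_one.bin")):
    phase_one()

# The first tracker has been deactivated, so a new instance may start now;
# reusing the first instance or nesting the two would raise RuntimeError.
with Tracker(Path("phase_two.bin")):
    phase_two()
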
"[name].js", 15 | }, 16 | externals: { 17 | _: "lodash", 18 | }, 19 | optimization: { 20 | minimize: true, 21 | minimizer: [ 22 | new TerserPlugin({ 23 | extractComments: false, 24 | }), 25 | ], 26 | }, 27 | }; 28 | --------------------------------------------------------------------------------