├── .clang-format
├── .gitattributes
├── .github
└── workflows
│ └── c-cpp.yml
├── .gitignore
├── .p4ignore
├── 3RDPARTYLICENSES.TXT
├── CMakeLists.txt
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── _config.yml
├── benchmark
├── CMakeLists.txt
└── source
│ ├── BenchmarkAlgorithm.cpp
│ ├── BenchmarkBitset.cpp
│ ├── BenchmarkDeque.cpp
│ ├── BenchmarkHash.cpp
│ ├── BenchmarkHeap.cpp
│ ├── BenchmarkList.cpp
│ ├── BenchmarkMap.cpp
│ ├── BenchmarkSet.cpp
│ ├── BenchmarkSort.cpp
│ ├── BenchmarkString.cpp
│ ├── BenchmarkTupleVector.cpp
│ ├── BenchmarkVector.cpp
│ ├── EASTLBenchmark.cpp
│ ├── EASTLBenchmark.h
│ └── main.cpp
├── doc
├── Benchmarks.md
├── BestPractices.md
├── Bonus
│ └── tuple_vector_readme.md
├── CMake
│ └── EASTL_Project_Integration.md
├── Design.md
├── EASTL-n2271.pdf
├── EASTL.natvis
├── FAQ.md
├── Glossary.md
├── Gotchas.md
├── Introduction.md
├── Maintenance.md
├── Modules.md
├── html
│ ├── EASTL Benchmarks.html
│ ├── EASTL Best Practices.html
│ ├── EASTL Design.html
│ ├── EASTL FAQ.html
│ ├── EASTL Glossary.html
│ ├── EASTL Gotchas.html
│ ├── EASTL Introduction.html
│ ├── EASTL Maintenance.html
│ ├── EASTL Modules.html
│ └── EASTLDoc.css
└── quick-reference.pdf
├── include
└── EASTL
│ ├── algorithm.h
│ ├── allocator.h
│ ├── allocator_malloc.h
│ ├── any.h
│ ├── array.h
│ ├── atomic.h
│ ├── bit.h
│ ├── bitset.h
│ ├── bitvector.h
│ ├── bonus
│ ├── adaptors.h
│ ├── call_traits.h
│ ├── compressed_pair.h
│ ├── fixed_ring_buffer.h
│ ├── fixed_tuple_vector.h
│ ├── intrusive_sdlist.h
│ ├── intrusive_slist.h
│ ├── list_map.h
│ ├── lru_cache.h
│ ├── overloaded.h
│ ├── ring_buffer.h
│ ├── sort_extra.h
│ └── tuple_vector.h
│ ├── chrono.h
│ ├── compare.h
│ ├── core_allocator.h
│ ├── core_allocator_adapter.h
│ ├── deque.h
│ ├── finally.h
│ ├── fixed_allocator.h
│ ├── fixed_function.h
│ ├── fixed_hash_map.h
│ ├── fixed_hash_set.h
│ ├── fixed_list.h
│ ├── fixed_map.h
│ ├── fixed_set.h
│ ├── fixed_slist.h
│ ├── fixed_string.h
│ ├── fixed_substring.h
│ ├── fixed_vector.h
│ ├── functional.h
│ ├── hash_map.h
│ ├── hash_set.h
│ ├── heap.h
│ ├── initializer_list.h
│ ├── internal
│ ├── atomic
│ │ ├── arch
│ │ │ ├── arch.h
│ │ │ ├── arch_add_fetch.h
│ │ │ ├── arch_and_fetch.h
│ │ │ ├── arch_cmpxchg_strong.h
│ │ │ ├── arch_cmpxchg_weak.h
│ │ │ ├── arch_compiler_barrier.h
│ │ │ ├── arch_cpu_pause.h
│ │ │ ├── arch_exchange.h
│ │ │ ├── arch_fetch_add.h
│ │ │ ├── arch_fetch_and.h
│ │ │ ├── arch_fetch_or.h
│ │ │ ├── arch_fetch_sub.h
│ │ │ ├── arch_fetch_xor.h
│ │ │ ├── arch_load.h
│ │ │ ├── arch_memory_barrier.h
│ │ │ ├── arch_or_fetch.h
│ │ │ ├── arch_signal_fence.h
│ │ │ ├── arch_store.h
│ │ │ ├── arch_sub_fetch.h
│ │ │ ├── arch_thread_fence.h
│ │ │ ├── arch_xor_fetch.h
│ │ │ ├── arm
│ │ │ │ ├── arch_arm.h
│ │ │ │ ├── arch_arm_load.h
│ │ │ │ ├── arch_arm_memory_barrier.h
│ │ │ │ ├── arch_arm_store.h
│ │ │ │ └── arch_arm_thread_fence.h
│ │ │ └── x86
│ │ │ │ ├── arch_x86.h
│ │ │ │ ├── arch_x86_add_fetch.h
│ │ │ │ ├── arch_x86_and_fetch.h
│ │ │ │ ├── arch_x86_cmpxchg_strong.h
│ │ │ │ ├── arch_x86_cmpxchg_weak.h
│ │ │ │ ├── arch_x86_exchange.h
│ │ │ │ ├── arch_x86_fetch_add.h
│ │ │ │ ├── arch_x86_fetch_and.h
│ │ │ │ ├── arch_x86_fetch_or.h
│ │ │ │ ├── arch_x86_fetch_sub.h
│ │ │ │ ├── arch_x86_fetch_xor.h
│ │ │ │ ├── arch_x86_load.h
│ │ │ │ ├── arch_x86_memory_barrier.h
│ │ │ │ ├── arch_x86_or_fetch.h
│ │ │ │ ├── arch_x86_store.h
│ │ │ │ ├── arch_x86_sub_fetch.h
│ │ │ │ ├── arch_x86_thread_fence.h
│ │ │ │ └── arch_x86_xor_fetch.h
│ │ ├── atomic.h
│ │ ├── atomic_asserts.h
│ │ ├── atomic_base_width.h
│ │ ├── atomic_casts.h
│ │ ├── atomic_flag.h
│ │ ├── atomic_flag_standalone.h
│ │ ├── atomic_integral.h
│ │ ├── atomic_macros.h
│ │ ├── atomic_macros
│ │ │ ├── atomic_macros.h
│ │ │ ├── atomic_macros_add_fetch.h
│ │ │ ├── atomic_macros_and_fetch.h
│ │ │ ├── atomic_macros_base.h
│ │ │ ├── atomic_macros_cmpxchg_strong.h
│ │ │ ├── atomic_macros_cmpxchg_weak.h
│ │ │ ├── atomic_macros_compiler_barrier.h
│ │ │ ├── atomic_macros_cpu_pause.h
│ │ │ ├── atomic_macros_exchange.h
│ │ │ ├── atomic_macros_fetch_add.h
│ │ │ ├── atomic_macros_fetch_and.h
│ │ │ ├── atomic_macros_fetch_or.h
│ │ │ ├── atomic_macros_fetch_sub.h
│ │ │ ├── atomic_macros_fetch_xor.h
│ │ │ ├── atomic_macros_load.h
│ │ │ ├── atomic_macros_memory_barrier.h
│ │ │ ├── atomic_macros_or_fetch.h
│ │ │ ├── atomic_macros_signal_fence.h
│ │ │ ├── atomic_macros_store.h
│ │ │ ├── atomic_macros_sub_fetch.h
│ │ │ ├── atomic_macros_thread_fence.h
│ │ │ └── atomic_macros_xor_fetch.h
│ │ ├── atomic_memory_order.h
│ │ ├── atomic_pointer.h
│ │ ├── atomic_size_aligned.h
│ │ ├── atomic_standalone.h
│ │ └── compiler
│ │ │ ├── compiler.h
│ │ │ ├── compiler_add_fetch.h
│ │ │ ├── compiler_and_fetch.h
│ │ │ ├── compiler_barrier.h
│ │ │ ├── compiler_cmpxchg_strong.h
│ │ │ ├── compiler_cmpxchg_weak.h
│ │ │ ├── compiler_cpu_pause.h
│ │ │ ├── compiler_exchange.h
│ │ │ ├── compiler_fetch_add.h
│ │ │ ├── compiler_fetch_and.h
│ │ │ ├── compiler_fetch_or.h
│ │ │ ├── compiler_fetch_sub.h
│ │ │ ├── compiler_fetch_xor.h
│ │ │ ├── compiler_load.h
│ │ │ ├── compiler_memory_barrier.h
│ │ │ ├── compiler_or_fetch.h
│ │ │ ├── compiler_signal_fence.h
│ │ │ ├── compiler_store.h
│ │ │ ├── compiler_sub_fetch.h
│ │ │ ├── compiler_thread_fence.h
│ │ │ ├── compiler_xor_fetch.h
│ │ │ ├── gcc
│ │ │ ├── compiler_gcc.h
│ │ │ ├── compiler_gcc_add_fetch.h
│ │ │ ├── compiler_gcc_and_fetch.h
│ │ │ ├── compiler_gcc_barrier.h
│ │ │ ├── compiler_gcc_cmpxchg_strong.h
│ │ │ ├── compiler_gcc_cmpxchg_weak.h
│ │ │ ├── compiler_gcc_cpu_pause.h
│ │ │ ├── compiler_gcc_exchange.h
│ │ │ ├── compiler_gcc_fetch_add.h
│ │ │ ├── compiler_gcc_fetch_and.h
│ │ │ ├── compiler_gcc_fetch_or.h
│ │ │ ├── compiler_gcc_fetch_sub.h
│ │ │ ├── compiler_gcc_fetch_xor.h
│ │ │ ├── compiler_gcc_load.h
│ │ │ ├── compiler_gcc_or_fetch.h
│ │ │ ├── compiler_gcc_signal_fence.h
│ │ │ ├── compiler_gcc_store.h
│ │ │ ├── compiler_gcc_sub_fetch.h
│ │ │ ├── compiler_gcc_thread_fence.h
│ │ │ └── compiler_gcc_xor_fetch.h
│ │ │ └── msvc
│ │ │ ├── compiler_msvc.h
│ │ │ ├── compiler_msvc_add_fetch.h
│ │ │ ├── compiler_msvc_and_fetch.h
│ │ │ ├── compiler_msvc_barrier.h
│ │ │ ├── compiler_msvc_cmpxchg_strong.h
│ │ │ ├── compiler_msvc_cmpxchg_weak.h
│ │ │ ├── compiler_msvc_cpu_pause.h
│ │ │ ├── compiler_msvc_exchange.h
│ │ │ ├── compiler_msvc_fetch_add.h
│ │ │ ├── compiler_msvc_fetch_and.h
│ │ │ ├── compiler_msvc_fetch_or.h
│ │ │ ├── compiler_msvc_fetch_sub.h
│ │ │ ├── compiler_msvc_fetch_xor.h
│ │ │ ├── compiler_msvc_or_fetch.h
│ │ │ ├── compiler_msvc_signal_fence.h
│ │ │ ├── compiler_msvc_sub_fetch.h
│ │ │ └── compiler_msvc_xor_fetch.h
│ ├── char_traits.h
│ ├── concepts.h
│ ├── config.h
│ ├── copy_help.h
│ ├── enable_shared.h
│ ├── fill_help.h
│ ├── fixed_pool.h
│ ├── function.h
│ ├── function_detail.h
│ ├── function_help.h
│ ├── functional_base.h
│ ├── generic_iterator.h
│ ├── hashtable.h
│ ├── in_place_t.h
│ ├── integer_sequence.h
│ ├── intrusive_hashtable.h
│ ├── mem_fn.h
│ ├── memory_base.h
│ ├── move_help.h
│ ├── pair_fwd_decls.h
│ ├── piecewise_construct_t.h
│ ├── red_black_tree.h
│ ├── smart_ptr.h
│ ├── thread_support.h
│ ├── tuple_fwd_decls.h
│ ├── type_compound.h
│ ├── type_detected.h
│ ├── type_fundamental.h
│ ├── type_pod.h
│ ├── type_properties.h
│ ├── type_transformations.h
│ └── type_void_t.h
│ ├── intrusive_hash_map.h
│ ├── intrusive_hash_set.h
│ ├── intrusive_list.h
│ ├── intrusive_ptr.h
│ ├── iterator.h
│ ├── linked_array.h
│ ├── linked_ptr.h
│ ├── list.h
│ ├── map.h
│ ├── memory.h
│ ├── meta.h
│ ├── numeric.h
│ ├── numeric_limits.h
│ ├── optional.h
│ ├── priority_queue.h
│ ├── queue.h
│ ├── random.h
│ ├── ratio.h
│ ├── safe_ptr.h
│ ├── scoped_array.h
│ ├── scoped_ptr.h
│ ├── segmented_vector.h
│ ├── set.h
│ ├── shared_array.h
│ ├── shared_ptr.h
│ ├── slist.h
│ ├── sort.h
│ ├── span.h
│ ├── stack.h
│ ├── string.h
│ ├── string_hash_map.h
│ ├── string_map.h
│ ├── string_view.h
│ ├── tuple.h
│ ├── type_traits.h
│ ├── unique_ptr.h
│ ├── unordered_map.h
│ ├── unordered_set.h
│ ├── utility.h
│ ├── variant.h
│ ├── vector.h
│ ├── vector_map.h
│ ├── vector_multimap.h
│ ├── vector_multiset.h
│ ├── vector_set.h
│ ├── version.h
│ └── weak_ptr.h
├── scripts
├── CMake
│ └── CommonCppFlags.cmake
└── build.sh
├── source
├── allocator_eastl.cpp
├── assert.cpp
├── atomic.cpp
├── fixed_pool.cpp
├── hashtable.cpp
├── intrusive_list.cpp
├── numeric_limits.cpp
├── red_black_tree.cpp
├── string.cpp
└── thread_support.cpp
└── test
├── CMakeLists.txt
└── source
├── ConceptImpls.h
├── EASTLTest.cpp
├── EASTLTest.h
├── EASTLTestAllocator.cpp
├── EASTLTestAllocator.h
├── EASTLTestIterators.h
├── GetTypeName.h
├── TestAlgorithm.cpp
├── TestAllocator.cpp
├── TestAllocatorPropagate.cpp
├── TestAny.cpp
├── TestArray.cpp
├── TestAtomicAsm.cpp
├── TestAtomicBasic.cpp
├── TestAtomicMultiThreaded.cpp
├── TestBit.cpp
├── TestBitVector.cpp
├── TestBitcast.cpp
├── TestBitset.cpp
├── TestCharTraits.cpp
├── TestChrono.cpp
├── TestConcepts.cpp
├── TestCppCXTypeTraits.cpp
├── TestDeque.cpp
├── TestExtra.cpp
├── TestFinally.cpp
├── TestFixedFunction.cpp
├── TestFixedHash.cpp
├── TestFixedList.cpp
├── TestFixedMap.cpp
├── TestFixedSList.cpp
├── TestFixedSet.cpp
├── TestFixedString.cpp
├── TestFixedTupleVector.cpp
├── TestFixedVector.cpp
├── TestFunctional.cpp
├── TestHash.cpp
├── TestHeap.cpp
├── TestIntrusiveHash.cpp
├── TestIntrusiveList.cpp
├── TestIntrusiveSDList.cpp
├── TestIntrusiveSList.cpp
├── TestIterator.cpp
├── TestList.cpp
├── TestListMap.cpp
├── TestLruCache.cpp
├── TestMap.cpp
├── TestMap.h
├── TestMemory.cpp
├── TestMeta.cpp
├── TestNumericLimits.cpp
├── TestOptional.cpp
├── TestRandom.cpp
├── TestRatio.cpp
├── TestRingBuffer.cpp
├── TestSList.cpp
├── TestSegmentedVector.cpp
├── TestSet.cpp
├── TestSet.h
├── TestSmartPtr.cpp
├── TestSort.cpp
├── TestSpan.cpp
├── TestString.cpp
├── TestString.inl
├── TestStringHashMap.cpp
├── TestStringMap.cpp
├── TestStringView.cpp
├── TestStringView.inl
├── TestTuple.cpp
├── TestTupleVector.cpp
├── TestTypeTraits.cpp
├── TestUtility.cpp
├── TestVariant.cpp
├── TestVariant2.cpp
├── TestVector.cpp
├── TestVectorMap.cpp
├── TestVectorSet.cpp
└── main.cpp
/.clang-format:
--------------------------------------------------------------------------------
1 | #-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-
2 | Language : Cpp
3 | BasedOnStyle : Google
4 | Standard : Auto
5 | #-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-
6 | AccessModifierOffset : -4
7 | AlignTrailingComments : true
8 | AllowAllParametersOfDeclarationOnNextLine : false
9 | AllowShortBlocksOnASingleLine : true
10 | AllowShortFunctionsOnASingleLine : true
11 | AllowShortIfStatementsOnASingleLine : false
12 | AllowShortLoopsOnASingleLine : false
13 | BinPackParameters : false
14 | BreakBeforeBraces : Allman
15 | BreakBeforeTernaryOperators : false
16 | BreakConstructorInitializersBeforeComma : true
17 | ColumnLimit : 120
18 | Cpp11BracedListStyle : true
19 | DerivePointerAlignment : true
20 | DerivePointerBinding : false
21 | IndentWidth : 4
22 | KeepEmptyLinesAtTheStartOfBlocks : true
23 | MaxEmptyLinesToKeep : 2
24 | NamespaceIndentation : All
25 | PointerBindsToType : true
26 | SpacesBeforeTrailingComments : 1
27 | SpacesInAngles : false
28 | SpacesInSquareBrackets : false
29 | TabWidth : 4
30 | UseTab : ForIndentation
31 | #-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-
32 | #-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-
33 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | # http://git-scm.com/docs/gitattributes
3 | * text=auto
4 | .appveyor.yml -text eol=crlf
5 | .appveyor-mingw.yml -text eol=crlf
6 | ci-*.cmd -text eol=crlf
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | tags
2 | cscope.out
3 | **/*.swp
4 | **/*.swo
5 | .swp
6 | *.swp
7 | .swo
8 | .TMP
9 | -.d
10 | eastl_build_out
11 | build_bench
12 | bench.bat
13 | build.bat
14 | .p4config
15 |
16 | ## CMake generated files
17 | CMakeCache.txt
18 | cmake_install.cmake
19 |
20 | ## Patch files
21 | *.patch
22 |
23 | ## For Visual Studio Generated projects
24 | *.sln
25 | **/*.vcxproj
26 | **/*.vcxproj.filters
27 | *.VC.opendb
28 | *.sdf
29 | **/*.suo
30 | **/*.user
31 | .vs/*
32 | **/Debug/*
33 | CMakeFiles/*
34 | EASTL.dir/**
35 | RelWithDebInfo/*
36 | Release/*
37 | Win32/*
38 | x64/*
39 | MinSizeRel/*
40 | build*/*
41 | Testing/*
42 | %ALLUSERSPROFILE%/*
43 |
44 | # Buck
45 | /buck-out/
46 | /.buckd/
47 | /buckaroo/
48 | .buckconfig.local
49 | BUCKAROO_DEPS
50 | .vscode/settings.json
51 |
--------------------------------------------------------------------------------
/.p4ignore:
--------------------------------------------------------------------------------
1 | /.git/
2 | tags
3 | .gitignore
4 | cscope.out
5 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | ## Contributing
2 |
3 | Before you can contribute, EA must have a Contributor License Agreement (CLA) on file that has been signed by each contributor.
4 | You can sign here: [Go to CLA](https://electronicarts.na1.echosign.com/public/esignWidget?wid=CBFCIBAA3AAABLblqZhByHRvZqmltGtliuExmuV-WNzlaJGPhbSRg2ufuPsM3P0QmILZjLpkGslg24-UJtek*)
5 |
6 | If you want to be recognized for your contributions to EASTL, or have a project using EASTL be recognized, you can submit a pull request to the appropriate sections in [README.md](README.md).
7 | Some examples of the format and information are as follows.
8 | * John Smith - jsmith@domain.com
9 | * John Smith
10 | * Frostbite - Electronic Arts
11 | * My Project - [link to said project]
12 |
13 | ### Pull Request Policy
14 |
15 | All code contributions to EASTL are submitted as [Github pull requests](https://help.github.com/articles/using-pull-requests/). All pull requests will be reviewed by an EASTL maintainer according to the guidelines found in the next section.
16 |
17 | Your pull request should:
18 |
19 | * merge cleanly
20 | * come with tests
21 |   * tests should be minimal and stable
22 |   * fail before your fix is applied
23 | * pass the test suite
24 | * follow the code formatting encoded in the .clang-format file
25 |   * limit running clang-format to new code
26 |   * do not deviate from the style already established in the files
27 |
28 | ### Getting the Repository
29 |
30 | ```bash
31 | git clone https://github.com/electronicarts/EASTL
32 | ```
33 |
34 | ### Running the Unit Tests
35 |
36 | EASTL uses CMake as its build system.
37 |
38 | * Create and navigate to "your_build_folder":
39 |   * `mkdir your_build_folder && cd your_build_folder`
40 | * Generate build scripts:
41 |   * `cmake eastl_source_folder -DEASTL_BUILD_TESTS:BOOL=ON`
42 | * Build unit tests for "your_config":
43 |   * `cmake --build . --config your_config`
44 | * Run the unit tests for "your_config" from the test folder:
45 |   * `cd test && ctest -C your_config`
46 |
47 | Here is an example batch file.
48 | ```batch
49 | set build_folder=out
50 | mkdir %build_folder%
51 | pushd %build_folder%
52 | call cmake .. -DEASTL_BUILD_TESTS:BOOL=ON -DEASTL_BUILD_BENCHMARK:BOOL=OFF
53 | call cmake --build . --config Release
54 | call cmake --build . --config Debug
55 | call cmake --build . --config RelWithDebInfo
56 | call cmake --build . --config MinSizeRel
57 | pushd test
58 | call ctest -C Release
59 | call ctest -C Debug
60 | call ctest -C RelWithDebInfo
61 | call ctest -C MinSizeRel
62 | popd
63 | popd
64 | ```
65 |
66 | Here is an example bash file
67 | ```bash
68 | build_folder=out
69 | mkdir $build_folder
70 | pushd $build_folder
71 | cmake .. -DEASTL_BUILD_TESTS:BOOL=ON -DEASTL_BUILD_BENCHMARK:BOOL=OFF
72 | cmake --build . --config Release
73 | cmake --build . --config Debug
74 | cmake --build . --config RelWithDebInfo
75 | cmake --build . --config MinSizeRel
76 | pushd test
77 | ctest -C Release
78 | ctest -C Debug
79 | ctest -C RelWithDebInfo
80 | ctest -C MinSizeRel
81 | popd
82 | popd
83 | ```
84 |
85 | The value of EASTL_BUILD_BENCHMARK can be toggled to `ON` in order to build projects that include the benchmark program.
86 |
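87 | Note for single-config CMake generators (e.g. Unix Makefiles, Ninja): the `--config`/`-C` options above only apply to multi-config generators such as Visual Studio. With a single-config generator, pick the configuration at generation time instead, for example `cmake .. -DCMAKE_BUILD_TYPE=Release -DEASTL_BUILD_TESTS:BOOL=ON`, then build and run `ctest` without the configuration flags.
88 |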
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2019, Electronic Arts
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | 1. Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | 2. Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | 3. Neither the name of the copyright holder nor the names of its
17 | contributors may be used to endorse or promote products derived from
18 | this software without specific prior written permission.
19 |
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # EA Standard Template Library
2 |
3 | [![Build Status](https://travis-ci.org/electronicarts/EASTL.svg?branch=master)](https://travis-ci.org/electronicarts/EASTL)
4 |
5 | EASTL stands for Electronic Arts Standard Template Library. It is a C++ template library of containers, algorithms, and iterators useful for runtime and tool development across multiple platforms. It is a fairly extensive and robust implementation of such a library and has an emphasis on high performance above all other considerations.
6 |
7 |
8 | ## Usage
9 |
10 | If you are familiar with the C++ STL or have worked with other templated container/algorithm libraries, you probably don't need to read this. If you have no familiarity with C++ templates at all, then you probably will need more than this document to get you up to speed. In this case, you need to understand that templates, when used properly, are powerful vehicles for the ease of creation of optimized C++ code. A description of C++ templates is outside the scope of this documentation, but there is plenty of such documentation on the Internet.
11 |
12 | EASTL is suitable for any tools and shipping applications where the functionality of EASTL is useful. Modern compilers are capable of producing good code with templates and many people are using them in both current generation and future generation applications on multiple platforms from embedded systems to servers and mainframes.
13 |
14 | ## Package Managers
15 |
16 | You can download and install EASTL using the [Conan](https://github.com/conan-io/conan) package manager:
17 |
18 | conan install eastl/3.15.00@
19 |
20 | The EASTL package in conan is kept up to date by Conan team members and community contributors. If the version is out-of-date, please [create an issue or pull request](https://github.com/conan-io/conan-center-index) on the Conan Center Index repository.
21 |
22 |
23 | You can download and install EASTL using the [vcpkg](https://github.com/Microsoft/vcpkg) dependency manager:
24 |
25 | git clone https://github.com/Microsoft/vcpkg.git
26 | cd vcpkg
27 | ./bootstrap-vcpkg.sh
28 | ./vcpkg integrate install
29 | vcpkg install eastl
30 |
31 | The EASTL port in vcpkg is kept up to date by Microsoft team members and community contributors. If the version is out of date, please [create an issue or pull request](https://github.com/Microsoft/vcpkg) on the vcpkg repository.
32 |
33 |
34 | ## Documentation
35 |
36 | Please see [EASTL Introduction](doc/Introduction.md).
37 |
38 |
39 | ## Compiling sources
40 |
41 | Please see [CONTRIBUTING.md](CONTRIBUTING.md) for details on compiling and testing the source.
42 |
43 | ## Credits And Maintainers
44 |
45 | EASTL was created by Paul Pedriana and he maintained the project for roughly 10 years.
46 |
47 | EASTL was subsequently maintained by Roberto Parolin for more than 8 years.
48 | He was the driver and proponent for getting EASTL open sourced.
49 | Rob was a mentor to all members of the team and taught us everything we ever wanted to know about C++ spookiness.
50 |
51 | After Rob, maintenance of EASTL passed to Max Winkler for roughly a year, until landing with its current maintainer Liam Mitchell.
52 |
53 | Significant EASTL contributions were made by (in alphabetical order):
54 |
55 | * Avery Lee
56 | * Colin Andrews
57 | * JP Flouret
58 | * Liam Mitchell
59 | * Matt Newport
60 | * Max Winkler
61 | * Paul Pedriana
62 | * Roberto Parolin
63 | * Simon Everett
64 |
65 | ## Contributors
66 |
67 | ## Projects And Products Using EASTL
68 |
69 | * Frostbite - Electronic Arts - [https://www.ea.com/frostbite]
70 |
71 | ## License
72 |
73 | Modified BSD License (3-Clause BSD license) see the file LICENSE in the project root.
74 |
--------------------------------------------------------------------------------
/_config.yml:
--------------------------------------------------------------------------------
1 | theme: jekyll-theme-minimal
--------------------------------------------------------------------------------
/doc/CMake/EASTL_Project_Integration.md:
--------------------------------------------------------------------------------
1 | ## Using EASTL in your own projects
2 |
3 | This page describes the steps needed to use EASTL in your own projects
4 |
5 | ## Setting up your project
6 |
7 | ### Using CMake
8 |
9 | Add to your CMakeLists.txt:
10 |
11 | ```cmake
12 | set(EASTL_ROOT_DIR C:/EASTL)
13 | include_directories (${EASTL_ROOT_DIR}/include)
14 | include_directories (${EASTL_ROOT_DIR}/test/packages/EAAssert/include)
15 | include_directories (${EASTL_ROOT_DIR}/test/packages/EABase/include/Common)
16 | include_directories (${EASTL_ROOT_DIR}/test/packages/EAMain/include)
17 | include_directories (${EASTL_ROOT_DIR}/test/packages/EAStdC/include)
18 | include_directories (${EASTL_ROOT_DIR}/test/packages/EATest/include)
19 | include_directories (${EASTL_ROOT_DIR}/test/packages/EAThread/include)
20 | set(EASTL_LIBRARY debug ${EASTL_ROOT_DIR}/build/Debug/EASTL.lib optimized ${EASTL_ROOT_DIR}/build/Release/EASTL.lib)
21 | add_custom_target(NatVis SOURCES ${EASTL_ROOT_DIR}/doc/EASTL.natvis)
22 | ```
23 |
24 | And then add the library into the linker
25 |
26 | ```
27 | target_link_libraries(... ${EASTL_LIBRARY})
28 | ```
29 |
30 | ### Using Visual Studio
31 |
32 | Using Visual Studio projects directly, you will need to do the following steps:
33 | - Add the include paths
34 | - Add the library path
35 | - Add the library dependency
36 | - Add natvis (optional)
37 |
38 | > Note that in the examples below ${EASTL_ROOT_DIR} is the folder in which you stored EASTL. You could create an environment variable for this.
39 |
40 | #### Add the include paths
41 |
42 | Add the following paths to your C/C++ -> General -> Additional include directories:
43 | ```
44 | ${EASTL_ROOT_DIR}/include
45 | ${EASTL_ROOT_DIR}/test/packages/EAAssert/include
46 | ${EASTL_ROOT_DIR}/test/packages/EABase/include/Common
47 | ${EASTL_ROOT_DIR}/test/packages/EAMain/include
48 | ${EASTL_ROOT_DIR}/test/packages/EAStdC/include
49 | ${EASTL_ROOT_DIR}/test/packages/EATest/include
50 | ${EASTL_ROOT_DIR}/test/packages/EAThread/include
51 | ```
52 |
53 | #### Add the library path
54 |
55 | Add the following library path to your Linker -> General -> Additional Library Directories:
56 | ```
57 | ${EASTL_ROOT_DIR}/build/$(Configuration)
58 | ```
59 |
60 | #### Add the library dependency
61 |
62 | Either add the following library to your Linker -> Input -> Additional Dependencies
63 | ```
64 | EASTL.lib
65 | ```
66 | Or in code use the following:
67 | ```
68 | #pragma comment(lib, "EASTL.lib")
69 | ```
70 |
71 | #### Add natvis (optional)
72 |
73 | > Adding the natvis file to your project allows the debugger to use custom visualizers for the eastl data types. This greatly enhances the debugging experience.
74 |
75 | Add the natvis file anywhere in your solution:
76 |
77 | ```
78 | Right-click your project: Add -> Existing item and then add the following file:
79 | ${EASTL_ROOT_DIR}/doc/EASTL.natvis
80 | ```
81 |
82 | ## Setting up your code
83 |
84 | ### Overloading operator new[]
85 |
86 | EASTL requires you to provide an overload of operator new[]; here is an example that simply forwards to the global operator new[]:
87 |
88 | ```cpp
89 | void* __cdecl operator new[](size_t size, const char* name, int flags, unsigned debugFlags, const char* file, int line)
90 | {
91 | return new uint8_t[size];
92 | }
93 | ```
94 |
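95 | EASTL may also call an aligned form of this overload; for example, the EASTLCoreAllocatorImpl in include/EASTL/core_allocator.h allocates via `::operator new[](size, alignment, alignOffset, name, flags, debugFlags, file, line)`. The version below is only a minimal sketch that forwards to C++17 aligned allocation and ignores `alignmentOffset` and the debug parameters:
96 |
97 | ```cpp
98 | #include <new>
99 |
100 | void* __cdecl operator new[](size_t size, size_t alignment, size_t alignmentOffset,
101 |                              const char* name, int flags, unsigned debugFlags,
102 |                              const char* file, int line)
103 | {
104 |     // Minimal sketch: honor the requested alignment via C++17 aligned operator new;
105 |     // the EASTL bookkeeping parameters (name, flags, debugFlags, file, line) are ignored here.
106 |     return ::operator new[](size, std::align_val_t(alignment));
107 | }
108 | ```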
--------------------------------------------------------------------------------
/doc/EASTL-n2271.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/electronicarts/EASTL/7fadbf0da01e6f6e0e7038b1b34343a069b8fc51/doc/EASTL-n2271.pdf
--------------------------------------------------------------------------------
/doc/Introduction.md:
--------------------------------------------------------------------------------
1 | # EASTL Introduction
2 |
3 | EASTL stands for Electronic Arts Standard Template Library. It is a C++ template library of containers, algorithms, and iterators useful for runtime and tool development across multiple platforms. It is a fairly extensive and robust implementation of such a library and has an emphasis on high performance above all other considerations.
4 |
5 | ## Intended Audience
6 |
7 | This is a short document intended to provide a basic introduction to EASTL for those new to the concept of EASTL or STL. If you are familiar with the C++ STL or have worked with other templated container/algorithm libraries, you probably don't need to read this. If you have no familiarity with C++ templates at all, then you probably will need more than this document to get you up to speed. In this case you need to understand that templates, when used properly, are powerful vehicles for the ease of creation of optimized C++ code. A description of C++ templates is outside the scope of this documentation, but there is plenty of such documentation on the Internet. See the EASTL FAQ.html document for links to information related to learning templates and STL.
8 |
9 | ## EASTL Modules
10 |
11 | EASTL consists primarily of containers, algorithms, and iterators. An example of a container is a linked list, while an example of an algorithm is a sort function; iterators are the entities of traversal for containers and algorithms. EASTL contains a fairly large number of containers and algorithms, each of which is a very clean, efficient, and unit-tested implementation. We can say with some confidence that you are not likely to find better implementations of these (commercial or otherwise), as these are the result of years of wisdom and diligent work. For a detailed list of EASTL modules, see EASTL Modules.html.
12 |
13 | ## EASTL Suitability
14 |
15 | What uses are EASTL suitable for? Essentially any situation in tools and shipping applications where the functionality of EASTL is useful. Modern compilers are capable of producing good code with templates and many people are using them in both current generation and future generation applications on multiple platforms from embedded systems to servers and mainframes.
16 |
17 | ----------------------------------------------
18 | End of document
--------------------------------------------------------------------------------
/doc/html/EASTL Introduction.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | EASTL Introduction
5 |
6 |
7 |
8 |
9 |
10 |
11 | EASTL Introduction
12 | EASTL stands for Electronic Arts Standard Template Library. It is a C++ template library of containers, algorithms, and
13 | iterators useful for runtime and tool development across multiple platforms. It is a fairly extensive and robust
14 | implementation of such a library and has an emphasis on high performance above all other considerations.
15 | Intended Audience
16 | This is a short document intended to provide a basic introduction to EASTL for
17 | those new to the concept of EASTL or STL. If you are familiar with the C++ STL
18 | or have worked with other templated container/algorithm libraries, you probably
19 | don't need to read this. If you have no familiarity with C++ templates at all,
20 | then you probably will need more than this document to get you up to speed. In
21 | this case you need to understand that templates, when used properly, are powerful
22 | vehicles for the ease of creation of optimized C++ code. A description of C++
23 | templates is outside the scope of this documentation, but there is plenty of such
24 | documentation on the Internet. See the EASTL FAQ.html
25 | document for links to information related to learning templates and STL.
26 | EASTL Modules
27 | EASTL consists primarily of containers, algorithms, and iterators. An example of a container is a linked list, while an
28 | example of an algorithm is a sort function; iterators are the entities of traversal for containers and algorithms.
29 | EASTL contains a fairly large number of containers and algorithms, each of which is a very clean, efficient, and
30 | unit-tested implementation. We can say with some confidence that you are not likely to find better implementations of
31 | these (commercial or otherwise), as these are the result of years of wisdom and diligent work. For a detailed list of
32 | EASTL modules, see EASTL Modules.html .
33 | EASTL Suitability
34 | What uses are EASTL suitable for? Essentially any situation in tools and shipping applications where the functionality
35 | of EASTL is useful. Modern compilers are capable of producing good code with templates and many people are using them
36 | in both current generation and future generation applications on multiple platforms from embedded systems to servers
37 | and mainframes.
38 |
39 | End of document
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
--------------------------------------------------------------------------------
/doc/html/EASTLDoc.css:
--------------------------------------------------------------------------------
1 | body
2 | {
3 | font-family: Georgia, "Times New Roman", Times, serif;
4 | font-size: 12pt;
5 | }
6 |
7 | h1
8 | {
9 | font-family: Verdana, Arial, Helvetica, sans-serif;
10 | display: block;
11 | background-color: #BBCCDD;
12 | border: 2px solid #000000;
13 | font-size: 16pt;
14 | font-weight: bold;
15 | padding: 6px;
16 | }
17 |
18 | h2
19 | {
20 | font-size: 14pt;
21 | font-family: Verdana;
22 | border-bottom: 2px solid black;
23 | }
24 |
25 | h3
26 | {
27 | font-family: Verdana;
28 | font-size: 13pt;
29 | font-weight: bold;
30 | }
31 |
32 | .code-example
33 | {
34 | display: block;
35 | background-color: #D1DDE9;
36 | margin-left: 3em;
37 | margin-right: 3em;
38 | margin-top: 1em;
39 | margin-bottom: 1em;
40 | padding: 8px;
41 | border: 2px solid #7993C8;
42 | font-family: "Courier New", Courier, mono;
43 | font-size: 10pt;
44 | white-space: pre;
45 | }
46 |
47 | .code-example-span
48 | {
49 | font-family: "Courier New", Courier, mono;
50 | font-size: 10pt;
51 | white-space: pre;
52 | }
53 |
54 | .code-example-comment
55 | {
56 | background-color: #e0e0f0;
57 | padding: 0px 0px;
58 | font-family: "Courier New", Courier, mono;
59 | font-size: 10pt;
60 | white-space: pre;
61 | color: #999999;
62 | margin: auto auto;
63 | }
64 |
65 |
66 | .faq-question
67 | {
68 | background-color: #D9E2EC;
69 | font-size: 12pt;
70 | font-weight: bold;
71 | margin-top: 0em;
72 | padding-left:5px;
73 | padding-right:8px;
74 | padding-top:2px;
75 | padding-bottom:3px;
76 | margin-bottom: 0.5em;
77 | }
78 |
79 | .faq-answer
80 | {
81 | display: block;
82 | margin: 4pt 1em 0.8em;
83 | }
84 | .indented {
85 | margin-left: 50px;
86 | }
87 |
--------------------------------------------------------------------------------
/doc/quick-reference.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/electronicarts/EASTL/7fadbf0da01e6f6e0e7038b1b34343a069b8fc51/doc/quick-reference.pdf
--------------------------------------------------------------------------------
/include/EASTL/bonus/adaptors.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////
4 |
5 | ///////////////////////////////////////////////////////////////////////////////
6 | ///////////////////////////////////////////////////////////////////////////////
7 |
8 |
9 | #ifndef EASTL_ADAPTORS_H
10 | #define EASTL_ADAPTORS_H
11 |
12 |
13 | #include <EASTL/internal/config.h>
14 | #include <EASTL/internal/move_help.h>
15 | #include <EASTL/iterator.h>
16 | #include <EASTL/type_traits.h>
17 |
18 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
19 | #pragma once // Some compilers (e.g. VC++) benefit significantly from using this. We've measured 3-4% build speed improvements in apps as a result.
20 | #endif
21 |
22 | EA_DISABLE_VC_WARNING(4512 4626)
23 | #if defined(_MSC_VER) && (_MSC_VER >= 1900) // VS2015+
24 | EA_DISABLE_VC_WARNING(5027) // move assignment operator was implicitly defined as deleted
25 | #endif
26 |
27 |
28 | namespace eastl
29 | {
30 | /// reverse
31 | ///
32 | /// This adaptor allows reverse iteration of a container in range-based for loops.
33 | ///
34 | /// for (auto& i : reverse(c)) { ... }
35 | ///
36 | template <typename Container>
37 | struct reverse_wrapper
38 | {
39 | template <typename C>
40 | reverse_wrapper(C&& c)
41 | : mContainer(eastl::forward<C>(c))
42 | {
43 | /**
44 | * NOTE:
45 | *
46 | * Due to reference collapsing rules of universal references Container type is either
47 | *
48 | * const C& if the input is a const lvalue
49 | * C& if the input is a non-const lvalue
50 | * C if the input is an rvalue
51 | * const C if the input is a const rvalue thus the object will have to be copied and the copy-ctor will be called
52 | *
53 | *
54 | * Thus we either move the whole container into this object or take a reference to the lvalue avoiding the copy.
55 | * The static_assert below ensures this.
56 | */
57 | static_assert(eastl::is_same_v<Container, C>, "Reference collapsed deduced type must be the same as the deduced Container type!");
58 | }
59 |
60 | Container mContainer;
61 | };
62 |
63 | template <typename Container>
64 | auto begin(const reverse_wrapper<Container>& w) -> decltype(eastl::rbegin(w.mContainer))
65 | {
66 | return eastl::rbegin(w.mContainer);
67 | }
68 |
69 | template <typename Container>
70 | auto end(const reverse_wrapper<Container>& w) -> decltype(eastl::rend(w.mContainer))
71 | {
72 | return eastl::rend(w.mContainer);
73 | }
74 |
75 | template <typename Container>
76 | reverse_wrapper<Container> reverse(Container&& c)
77 | {
78 | return reverse_wrapper<Container>(eastl::forward<Container>(c));
79 | }
80 |
81 | } // namespace eastl
82 |
83 | #if defined(_MSC_VER) && (_MSC_VER >= 1900) // VS2015+
84 | EA_RESTORE_VC_WARNING()
85 | #endif
86 | EA_RESTORE_VC_WARNING()
87 |
88 | #endif // Header include guard
89 |
--------------------------------------------------------------------------------
/include/EASTL/bonus/call_traits.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////
4 |
5 | ///////////////////////////////////////////////////////////////////////////////
6 | // The design for call_traits here is very similar to that found in template
7 | // metaprogramming libraries such as Boost, GCC, and Metrowerks, given that
8 | // these libraries have established this interface as a defacto standard for
9 | // solving this problem. Also, these are described in various books on the
10 | // topic of template metaprogramming, such as "Modern C++ Design".
11 | //
12 | // See http://www.boost.org/libs/utility/call_traits.htm or search for
13 | // call_traits in Google for a description of call_traits.
14 | ///////////////////////////////////////////////////////////////////////////////
15 |
16 |
17 | #ifndef EASTL_CALL_TRAITS_H
18 | #define EASTL_CALL_TRAITS_H
19 |
20 |
21 | #include <EASTL/internal/config.h>
22 | #include <EASTL/type_traits.h>
23 |
24 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
25 | #pragma once // Some compilers (e.g. VC++) benefit significantly from using this. We've measured 3-4% build speed improvements in apps as a result.
26 | #endif
27 |
28 |
29 |
30 | namespace eastl
31 | {
32 |
33 |
34 | template <typename T, bool small_>
35 | struct ct_imp2 { typedef const T& param_type; };
36 |
37 | template <typename T>
38 | struct ct_imp2<T, true> { typedef const T param_type; };
39 |
40 | template <typename T, bool isp, bool b1>
41 | struct ct_imp { typedef const T& param_type; };
42 |
43 | template <typename T, bool isp>
44 | struct ct_imp<T, isp, true> { typedef typename ct_imp2<T, sizeof(T) <= sizeof(void*)>::param_type param_type; };
45 |
46 | template <typename T, bool b1>
47 | struct ct_imp<T, true, b1> { typedef T const param_type; };
48 |
49 |
50 |
51 | template <typename T>
52 | struct call_traits
53 | {
54 | public:
55 | typedef T value_type;
56 | typedef T& reference;
57 | typedef const T& const_reference;
58 | typedef typename ct_imp<T, is_pointer<T>::value, is_arithmetic<T>::value>::param_type param_type;
59 | };
60 |
61 |
62 | template <typename T>
63 | struct call_traits<T&>
64 | {
65 | typedef T& value_type;
66 | typedef T& reference;
67 | typedef const T& const_reference;
68 | typedef T& param_type;
69 | };
70 |
71 |
72 | template <typename T, size_t N>
73 | struct call_traits<T [N]>
74 | {
75 | private:
76 | typedef T array_type[N];
77 |
78 | public:
79 | typedef const T* value_type;
80 | typedef array_type& reference;
81 | typedef const array_type& const_reference;
82 | typedef const T* const param_type;
83 | };
84 |
85 |
86 | template <typename T, size_t N>
87 | struct call_traits<const T [N]>
88 | {
89 | private:
90 | typedef const T array_type[N];
91 |
92 | public:
93 | typedef const T* value_type;
94 | typedef array_type& reference;
95 | typedef const array_type& const_reference;
96 | typedef const T* const param_type;
97 | };
98 |
99 |
100 | } // namespace eastl
101 |
102 |
103 | #endif // Header include guard
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
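119 | // Usage sketch (illustrative): call_traits<T>::param_type picks an efficient way to
120 | // pass T to a function: a plain value ("const T") for small scalar and pointer types,
121 | // and "const T&" for everything else.
122 | //
123 | //     template <typename T>
124 | //     void store(typename eastl::call_traits<T>::param_type value); // 'const int' for T = int,
125 | //                                                                   // 'const Widget&' for a hypothetical large Widget type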
--------------------------------------------------------------------------------
/include/EASTL/bonus/fixed_ring_buffer.h:
--------------------------------------------------------------------------------
1 | ///////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | ///////////////////////////////////////////////////////////////////////////////
4 |
5 | #ifndef EASTL_FIXED_RING_BUFFER_H
6 | #define EASTL_FIXED_RING_BUFFER_H
7 |
8 | #include <EASTL/internal/config.h>
9 | #include <EASTL/fixed_vector.h>
10 | #include <EASTL/bonus/ring_buffer.h>
11 |
12 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
13 | #pragma once // Some compilers (e.g. VC++) benefit significantly from using this. We've measured 3-4% build speed improvements in apps as a result.
14 | #endif
15 |
16 | namespace eastl
17 | {
18 |
19 | /// fixed_ring_buffer
20 | ///
21 | /// This is a convenience template alias for creating a fixed-sized
22 | /// ring_buffer using eastl::fixed_vector as its storage container. This has
23 | /// been tricky for users to get correct due to the constructor requirements
24 | /// of eastl::ring_buffer leaking the implementation detail of the sentinel
25 | /// value being used internally. In addition, it was not obvious what the
26 | /// correct allocator_type template parameter should be used for containers
27 | /// providing both a default allocator type and an overflow allocator type.
28 | ///
29 | /// We are over-allocating the fixed_vector container to accommodate the
30 | /// ring_buffer sentinel to prevent that implementation detail leaking into
31 | /// user code.
32 | ///
33 | /// Example usage:
34 | ///
35 | /// fixed_ring_buffer<int, 8> rb = {0, 1, 2, 3, 4, 5, 6, 7};
36 | /// or
37 | /// fixed_ring_buffer<int, 8> rb(8); // capacity doesn't need to respect sentinel
38 | /// rb.push_back(0);
39 | ///
40 | ///
41 | #if !defined(EA_COMPILER_NO_TEMPLATE_ALIASES)
42 | template <typename T, size_t N>
43 | using fixed_ring_buffer =
44 | ring_buffer<T, fixed_vector<T, N + 1, false>, typename fixed_vector<T, N + 1, false>::overflow_allocator_type>;
45 | #endif
46 |
47 | } // namespace eastl
48 |
49 | #endif // Header include guard
50 |
51 |
--------------------------------------------------------------------------------
/include/EASTL/bonus/overloaded.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////
4 |
5 | #ifndef EASTL_OVERLOADED_H
6 | #define EASTL_OVERLOADED_H
7 |
8 | #include <EASTL/internal/config.h>
9 | #include <EASTL/utility.h>
10 |
11 |
12 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
13 | #pragma once // Some compilers (e.g. VC++) benefit significantly from using this. We've measured 3-4% build speed
14 | // improvements in apps as a result.
15 | #endif
16 |
17 | // 4512/4626 - 'class' : assignment operator could not be generated. // This disabling would best be put elsewhere.
18 | EA_DISABLE_VC_WARNING(4512 4626);
19 |
20 | namespace eastl
21 | {
22 | ///////////////////////////////////////////////////////////////////////////
23 | /// overloaded
24 | ///
25 | /// A helper class that permits you to combine multiple function objects into one.
26 | /// Typically, this helper is really handy when visiting an eastl::variant with multiple lambdas.
27 | /// Example:
28 | ///
29 | /// eastl::variant<int, string> v{42};
30 | ///
31 | /// eastl::visit(
32 | /// eastl::overloaded{
33 | /// [](const int& x) { std::cout << "Visited an integer: " << x << "\n"; }, // Will reach that lambda with x == 42.
34 | /// [](const string& s) { std::cout << "Visited an string: " << s << "\n"; }
35 | /// },
36 | /// v
37 | /// );
38 | ///////////////////////////////////////////////////////////////////////////
39 | template <typename... T>
40 | struct overloaded;
41 |
42 | template <typename T>
43 | struct overloaded<T> : T
44 | {
45 | template <typename U>
46 | EA_CPP14_CONSTEXPR overloaded(U&& u) : T(eastl::forward<U>(u))
47 | {
48 | }
49 |
50 | using T::operator();
51 | };
52 |
53 | template <typename T, typename... Rest>
54 | struct overloaded<T, Rest...> : T, overloaded<Rest...>
55 | {
56 | template <typename U, typename... V>
57 | EA_CPP14_CONSTEXPR overloaded(U&& u, V&&... v) : T(eastl::forward<U>(u)), overloaded<Rest...>(eastl::forward<V>(v)...)
58 | {
59 | }
60 |
61 | using T::operator();
62 | using overloaded<Rest...>::operator();
63 | };
64 |
65 | #ifdef __cpp_deduction_guides
66 | template <typename... T>
67 | overloaded(T...) -> overloaded<T...>;
68 | #endif
69 |
70 | ///////////////////////////////////////////////////////////////////////////
71 | /// make_overloaded
72 | ///
73 | /// Helper function to create an overloaded instance when lacking deduction guides.
74 | /// make_overloaded(f1, f2, f3) == overloaded{f1, f2, f3}
75 | ///////////////////////////////////////////////////////////////////////////
76 | template <typename... T>
77 | EA_CPP14_CONSTEXPR overloaded<typename eastl::remove_cvref<T>::type...> make_overloaded(T&&... t)
78 | {
79 | return overloaded<typename eastl::remove_cvref<T>::type...>{eastl::forward<T>(t)...};
80 | }
81 |
82 | } // namespace eastl
83 |
84 | EA_RESTORE_VC_WARNING();
85 |
86 | #endif // EASTL_OVERLOADED_H
87 |
--------------------------------------------------------------------------------
/include/EASTL/compare.h:
--------------------------------------------------------------------------------
1 | ///////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | ///////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_COMPARE_H
7 | #define EASTL_COMPARE_H
8 |
9 |
10 | #include <EASTL/internal/config.h>
11 |
12 | namespace eastl
13 | {
14 |
15 | #if defined(EA_COMPILER_HAS_THREE_WAY_COMPARISON)
16 | struct synth_three_way
17 | {
18 | template <typename T, typename U>
19 | constexpr auto operator()(const T& t, const U& u) const requires requires
20 | {
21 | {t < u} -> std::convertible_to<bool>;
22 | {u < t} -> std::convertible_to<bool>;
23 | }
24 | {
25 | if constexpr (std::three_way_comparable_with<T, U>)
26 | {
27 | return t <=> u;
28 | }
29 | else
30 | {
31 | return (t < u) ? std::weak_ordering::less :
32 | (u < t) ? std::weak_ordering::greater :
33 | std::weak_ordering::equivalent;
34 | }
35 | }
36 | };
37 |
38 | template <typename T, typename U = T>
39 | using synth_three_way_result = decltype(synth_three_way{}(declval<T&>(), declval<U&>()));
40 | #endif
41 |
42 | } // namespace eastl
43 |
44 |
45 | #endif // Header include guard
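46 |
47 | // Usage sketch (illustrative, assuming a compiler where the three-way comparison block
48 | // above is enabled): synth_three_way yields an ordering even for types that only
49 | // provide operator<, which is useful when synthesizing operator<=> for containers of
50 | // such types.
51 | //
52 | //     struct Legacy { int v; bool operator<(const Legacy& o) const { return v < o.v; } };
53 | //
54 | //     Legacy a{1}, b{2};
55 | //     auto ord = eastl::synth_three_way{}(a, b); // std::weak_ordering::less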
--------------------------------------------------------------------------------
/include/EASTL/core_allocator.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////
4 |
5 | #ifndef EASTL_CORE_ALLOCATOR_H
6 | #define EASTL_CORE_ALLOCATOR_H
7 |
8 | #if EASTL_CORE_ALLOCATOR_ENABLED
9 |
10 | #include <coreallocator/icoreallocator_interface.h>
11 |
12 | namespace EA
13 | {
14 | namespace Allocator
15 | {
16 | /// EASTLCoreAllocatorImpl
17 | ///
18 | /// EASTL provides an out of the box implementation of the
19 | /// ICoreAllocator interface. This is provided as a convenience for
20 | /// users who wish to provide ICoreAllocator implementations for EASTL to use.
21 | ///
22 | /// EASTL has a dependency on coreallocator so to provide an out of
23 | /// the box implementation for EASTLCoreAllocator and EASTLCoreDeleter
24 | /// that can be used and tested. Historically we could not test
25 | /// ICoreAllocator interface because we relied on the code being linked
26 | /// in user code.
27 | ///
28 |
29 | class EASTLCoreAllocatorImpl : public ICoreAllocator
30 | {
31 | public:
32 | virtual void* Alloc(size_t size, const char* name, unsigned int flags)
33 | {
34 | return ::operator new[](size, name, flags, 0, __FILE__, __LINE__);
35 | }
36 |
37 | virtual void* Alloc(size_t size, const char* name, unsigned int flags, unsigned int alignment, unsigned int alignOffset = 0)
38 | {
39 | return ::operator new[](size, alignment, alignOffset, name, flags, 0, __FILE__, __LINE__);
40 | }
41 |
42 | virtual void Free(void* ptr, size_t size = 0)
43 | {
44 | ::operator delete(static_cast<char*>(ptr));
45 | }
46 |
47 | virtual void* AllocDebug(size_t size, const DebugParams debugParams, unsigned int flags)
48 | {
49 | return Alloc(size, debugParams.mName, flags);
50 | }
51 |
52 | virtual void* AllocDebug(size_t size, const DebugParams debugParams, unsigned int flags, unsigned int align, unsigned int alignOffset = 0)
53 | {
54 | return Alloc(size, debugParams.mName, flags, align, alignOffset);
55 | }
56 |
57 | static EASTLCoreAllocatorImpl* GetDefaultAllocator();
58 | };
59 |
60 | inline EASTLCoreAllocatorImpl* EASTLCoreAllocatorImpl::GetDefaultAllocator()
61 | {
62 | static EASTLCoreAllocatorImpl allocator;
63 | return &allocator;
64 | }
65 | }
66 | }
67 |
68 | #endif // EASTL_CORE_ALLOCATOR_ENABLED
69 | #endif // EASTL_CORE_ALLOCATOR_H
70 |
71 |
--------------------------------------------------------------------------------
/include/EASTL/finally.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////
4 |
5 | ///////////////////////////////////////////////////////////////////////////////
6 | // eastl::finally is an implementation of the popular C++ idiom RAII (Resource
7 | // Acquisition Is Initialization). eastl::finally guarantees that the user-
8 | // provided callable will be executed upon whatever mechanism is used to leave
9 | // the current scope. This guards against user errors and is a popular
10 | // technique for writing robust code in execution environments that have
11 | // exceptions enabled.
12 | //
13 | // Example:
14 | // void foo()
15 | // {
16 | // void* p = malloc(128);
17 | // auto _ = eastl::make_finally([&] { free(p); });
18 | //
19 | // // Code that may throw an exception...
20 | //
21 | // } // eastl::finally guaranteed to call 'free' at scope exit.
22 | //
23 | // References:
24 | // * https://www.bfilipek.com/2017/04/finalact.html
25 | ///////////////////////////////////////////////////////////////////////////////
26 |
27 | #ifndef EASTL_FINALLY_H
28 | #define EASTL_FINALLY_H
29 |
30 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
31 | #pragma once
32 | #endif
33 |
34 | #include <EASTL/internal/config.h>
35 | #include <EASTL/internal/move_help.h>
36 | #include <EASTL/type_traits.h>
37 |
38 | namespace eastl
39 | {
40 | ///////////////////////////////////////////////////////////////////////////
41 | // finally
42 | //
43 | // finally is the type that calls the user's callback on scope exit.
44 | //
45 | template <typename Functor>
46 | class finally
47 | {
48 | static_assert(!eastl::is_lvalue_reference_v<Functor>, "eastl::finally requires the callable is passed as an rvalue reference.");
49 |
50 | Functor m_functor;
51 | bool m_engaged = false;
52 |
53 | public:
54 | finally(Functor f) : m_functor(eastl::move(f)), m_engaged(true) {}
55 |
56 | finally(finally&& other) : m_functor(eastl::move(other.m_functor)), m_engaged(other.m_engaged)
57 | {
58 | other.dismiss();
59 | }
60 |
61 | ~finally() { execute(); }
62 |
63 | finally(const finally&) = delete;
64 | finally& operator=(const finally&) = delete;
65 | finally& operator=(finally&&) = delete;
66 |
67 | inline void dismiss() { m_engaged = false; }
68 |
69 | inline void execute()
70 | {
71 | if (m_engaged)
72 | m_functor();
73 |
74 | dismiss();
75 | }
76 | };
77 |
78 |
79 | ///////////////////////////////////////////////////////////////////////////
80 | // make_finally
81 | //
82 | // this utility function is the standard mechanism to perform the required
83 | // type deduction on the user-provided callback in order to create a
84 | // 'finally' object.
85 | //
86 | template <typename F>
87 | auto make_finally(F&& f)
88 | {
89 | return finally<F>(eastl::forward<F>(f));
90 | }
91 | }
92 |
93 | #endif // EASTL_FINALLY_H
94 |
--------------------------------------------------------------------------------
/include/EASTL/initializer_list.h:
--------------------------------------------------------------------------------
1 | ///////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | //
4 | // This file #includes <initializer_list> if it's available, else it defines
5 | // its own version of std::initializer_list. It does not define eastl::initializer_list
6 | // because that would not provide any use, due to how the C++11 Standard works.
7 | ///////////////////////////////////////////////////////////////////////////////
8 |
9 |
10 | #ifndef EASTL_INITIALIZER_LIST_H
11 | #define EASTL_INITIALIZER_LIST_H
12 |
13 |
14 | #include <EABase/eabase.h>
15 | #include <EABase/eahave.h>
16 |
17 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
18 | #pragma once // Some compilers (e.g. VC++) benefit significantly from using this. We've measured 3-4% build speed improvements in apps as a result.
19 | #endif
20 |
21 |
22 | #if defined(EA_HAVE_CPP11_INITIALIZER_LIST) // If the compiler can generate calls to std::initializer_list...
23 |
24 | // The initializer_list type must be declared in the std namespace, as that's the
25 | // namespace the compiler uses when generating code to use it.
26 | EA_DISABLE_ALL_VC_WARNINGS()
27 | #include <initializer_list>
28 | EA_RESTORE_ALL_VC_WARNINGS()
29 |
30 | #else
31 |
32 | // If you get an error here about initializer_list being already defined, then the EA_HAVE_CPP11_INITIALIZER_LIST define from <EABase/eahave.h> needs to be updated.
33 | namespace std
34 | {
35 | // See the C++11 Standard, section 18.9.
36 | template <class E>
37 | class initializer_list
38 | {
39 | public:
40 | typedef E value_type;
41 | typedef const E& reference;
42 | typedef const E& const_reference;
43 | typedef size_t size_type;
44 | typedef const E* iterator; // Must be const, as initializer_list (and its mpArray) is an immutable temp object.
45 | typedef const E* const_iterator;
46 |
47 | private:
48 | iterator mpArray;
49 | size_type mArraySize;
50 |
51 | // This constructor is private, but the C++ compiler has the ability to call it, as per the C++11 Standard.
52 | initializer_list(const_iterator pArray, size_type arraySize)
53 | : mpArray(pArray), mArraySize(arraySize) { }
54 |
55 | public:
56 | initializer_list() EA_NOEXCEPT // EA_NOEXCEPT requires a recent version of EABase.
57 | : mpArray(NULL), mArraySize(0) { }
58 |
59 | #if defined(EA_COMPILER_MSVC)
60 | // MSVC generates constructor calls with two pointers instead of one pointer + size. The constructor is
61 | // public.
62 | // See: https://docs.microsoft.com/en-us/cpp/standard-library/initializer-list-class#initializer_list
63 | initializer_list(const_iterator pFirst, const_iterator pLast) EA_NOEXCEPT
64 | : mpArray(pFirst), mArraySize(pLast - pFirst) { }
65 | #endif
66 |
67 | size_type size() const EA_NOEXCEPT { return mArraySize; }
68 | const_iterator begin() const EA_NOEXCEPT { return mpArray; } // Must be const_iterator, as initializer_list (and its mpArray) is an immutable temp object.
69 | const_iterator end() const EA_NOEXCEPT { return mpArray + mArraySize; }
70 | };
71 |
72 |
73 | template <class T>
74 | const T* begin(std::initializer_list<T> ilist) EA_NOEXCEPT
75 | {
76 | return ilist.begin();
77 | }
78 |
79 | template <class T>
80 | const T* end(std::initializer_list<T> ilist) EA_NOEXCEPT
81 | {
82 | return ilist.end();
83 | }
84 | }
85 |
86 | #endif
87 |
88 |
89 | #endif // Header include guard
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/arch/arch.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_ARCH_H
7 | #define EASTL_ATOMIC_INTERNAL_ARCH_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // Include the architecture specific implementations
17 | //
18 | #if defined(EA_PROCESSOR_X86) || defined(EA_PROCESSOR_X86_64)
19 |
20 | #include "x86/arch_x86.h"
21 |
22 | #elif defined(EA_PROCESSOR_ARM32) || defined(EA_PROCESSOR_ARM64)
23 |
24 | #include "arm/arch_arm.h"
25 |
26 | #endif
27 |
28 |
29 | /////////////////////////////////////////////////////////////////////////////////
30 |
31 |
32 | #include "arch_fetch_add.h"
33 | #include "arch_fetch_sub.h"
34 |
35 | #include "arch_fetch_and.h"
36 | #include "arch_fetch_xor.h"
37 | #include "arch_fetch_or.h"
38 |
39 | #include "arch_add_fetch.h"
40 | #include "arch_sub_fetch.h"
41 |
42 | #include "arch_and_fetch.h"
43 | #include "arch_xor_fetch.h"
44 | #include "arch_or_fetch.h"
45 |
46 | #include "arch_exchange.h"
47 |
48 | #include "arch_cmpxchg_weak.h"
49 | #include "arch_cmpxchg_strong.h"
50 |
51 | #include "arch_load.h"
52 | #include "arch_store.h"
53 |
54 | #include "arch_compiler_barrier.h"
55 |
56 | #include "arch_cpu_pause.h"
57 |
58 | #include "arch_memory_barrier.h"
59 |
60 | #include "arch_signal_fence.h"
61 |
62 | #include "arch_thread_fence.h"
63 |
64 |
65 | #endif /* EASTL_ATOMIC_INTERNAL_ARCH_H */
66 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/arch/arch_compiler_barrier.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_ARCH_COMPILER_BARRIER_H
7 | #define EASTL_ATOMIC_INTERNAL_ARCH_COMPILER_BARRIER_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | #define EASTL_ARCH_ATOMIC_COMPILER_BARRIER_AVAILABLE 0
15 |
16 | #define EASTL_ARCH_ATOMIC_COMPILER_BARRIER_DATA_DEPENDENCY_AVAILABLE 0
17 |
18 |
19 | #endif /* EASTL_ATOMIC_INTERNAL_ARCH_COMPILER_BARRIER_H */
20 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/arch/arch_cpu_pause.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
 2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_ARCH_CPU_PAUSE_H
7 | #define EASTL_ATOMIC_INTERNAL_ARCH_CPU_PAUSE_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_ARCH_ATOMIC_CPU_PAUSE()
17 | //
18 | #if defined(EASTL_ARCH_ATOMIC_CPU_PAUSE)
19 | #define EASTL_ARCH_ATOMIC_CPU_PAUSE_AVAILABLE 1
20 | #else
21 | #define EASTL_ARCH_ATOMIC_CPU_PAUSE_AVAILABLE 0
22 | #endif
23 |
24 |
25 | #endif /* EASTL_ATOMIC_INTERNAL_ARCH_CPU_PAUSE_H */
26 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/arch/arch_memory_barrier.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_ARCH_MEMORY_BARRIER_H
7 | #define EASTL_ATOMIC_INTERNAL_ARCH_MEMORY_BARRIER_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_ARCH_ATOMIC_CPU_MB()
17 | //
18 | #if defined(EASTL_ARCH_ATOMIC_CPU_MB)
19 | #define EASTL_ARCH_ATOMIC_CPU_MB_AVAILABLE 1
20 | #else
21 | #define EASTL_ARCH_ATOMIC_CPU_MB_AVAILABLE 0
22 | #endif
23 |
24 |
25 | /////////////////////////////////////////////////////////////////////////////////
26 | //
27 | // void EASTL_ARCH_ATOMIC_CPU_WMB()
28 | //
29 | #if defined(EASTL_ARCH_ATOMIC_CPU_WMB)
30 | #define EASTL_ARCH_ATOMIC_CPU_WMB_AVAILABLE 1
31 | #else
32 | #define EASTL_ARCH_ATOMIC_CPU_WMB_AVAILABLE 0
33 | #endif
34 |
35 |
36 | /////////////////////////////////////////////////////////////////////////////////
37 | //
38 | // void EASTL_ARCH_ATOMIC_CPU_RMB()
39 | //
40 | #if defined(EASTL_ARCH_ATOMIC_CPU_RMB)
41 | #define EASTL_ARCH_ATOMIC_CPU_RMB_AVAILABLE 1
42 | #else
43 | #define EASTL_ARCH_ATOMIC_CPU_RMB_AVAILABLE 0
44 | #endif
45 |
46 |
47 | #endif /* EASTL_ATOMIC_INTERNAL_ARCH_MEMORY_BARRIER_H */
48 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/arch/arch_signal_fence.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_ARCH_SIGNAL_FENCE_H
7 | #define EASTL_ATOMIC_INTERNAL_ARCH_SIGNAL_FENCE_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | #define EASTL_ARCH_ATOMIC_SIGNAL_FENCE_RELAXED_AVAILABLE 0
15 | #define EASTL_ARCH_ATOMIC_SIGNAL_FENCE_ACQUIRE_AVAILABLE 0
16 | #define EASTL_ARCH_ATOMIC_SIGNAL_FENCE_RELEASE_AVAILABLE 0
17 | #define EASTL_ARCH_ATOMIC_SIGNAL_FENCE_ACQ_REL_AVAILABLE 0
18 | #define EASTL_ARCH_ATOMIC_SIGNAL_FENCE_SEQ_CST_AVAILABLE 0
19 |
20 |
21 | #endif /* EASTL_ATOMIC_INTERNAL_ARCH_SIGNAL_FENCE_H */
22 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/arch/arch_store.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_ARCH_STORE_H
7 | #define EASTL_ATOMIC_INTERNAL_ARCH_STORE_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_ARCH_ATOMIC_STORE_*_N(type, type * ptr, type val)
17 | //
18 | #if defined(EASTL_ARCH_ATOMIC_STORE_RELAXED_8)
19 | #define EASTL_ARCH_ATOMIC_STORE_RELAXED_8_AVAILABLE 1
20 | #else
21 | #define EASTL_ARCH_ATOMIC_STORE_RELAXED_8_AVAILABLE 0
22 | #endif
23 |
24 | #if defined(EASTL_ARCH_ATOMIC_STORE_RELEASE_8)
25 | #define EASTL_ARCH_ATOMIC_STORE_RELEASE_8_AVAILABLE 1
26 | #else
27 | #define EASTL_ARCH_ATOMIC_STORE_RELEASE_8_AVAILABLE 0
28 | #endif
29 |
30 | #if defined(EASTL_ARCH_ATOMIC_STORE_SEQ_CST_8)
31 | #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_8_AVAILABLE 1
32 | #else
33 | #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_8_AVAILABLE 0
34 | #endif
35 |
36 |
37 | #if defined(EASTL_ARCH_ATOMIC_STORE_RELAXED_16)
38 | #define EASTL_ARCH_ATOMIC_STORE_RELAXED_16_AVAILABLE 1
39 | #else
40 | #define EASTL_ARCH_ATOMIC_STORE_RELAXED_16_AVAILABLE 0
41 | #endif
42 |
43 | #if defined(EASTL_ARCH_ATOMIC_STORE_RELEASE_16)
44 | #define EASTL_ARCH_ATOMIC_STORE_RELEASE_16_AVAILABLE 1
45 | #else
46 | #define EASTL_ARCH_ATOMIC_STORE_RELEASE_16_AVAILABLE 0
47 | #endif
48 |
49 | #if defined(EASTL_ARCH_ATOMIC_STORE_SEQ_CST_16)
50 | #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_16_AVAILABLE 1
51 | #else
52 | #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_16_AVAILABLE 0
53 | #endif
54 |
55 |
56 | #if defined(EASTL_ARCH_ATOMIC_STORE_RELAXED_32)
57 | #define EASTL_ARCH_ATOMIC_STORE_RELAXED_32_AVAILABLE 1
58 | #else
59 | #define EASTL_ARCH_ATOMIC_STORE_RELAXED_32_AVAILABLE 0
60 | #endif
61 |
62 | #if defined(EASTL_ARCH_ATOMIC_STORE_RELEASE_32)
63 | #define EASTL_ARCH_ATOMIC_STORE_RELEASE_32_AVAILABLE 1
64 | #else
65 | #define EASTL_ARCH_ATOMIC_STORE_RELEASE_32_AVAILABLE 0
66 | #endif
67 |
68 | #if defined(EASTL_ARCH_ATOMIC_STORE_SEQ_CST_32)
69 | #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_32_AVAILABLE 1
70 | #else
71 | #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_32_AVAILABLE 0
72 | #endif
73 |
74 |
75 | #if defined(EASTL_ARCH_ATOMIC_STORE_RELAXED_64)
76 | #define EASTL_ARCH_ATOMIC_STORE_RELAXED_64_AVAILABLE 1
77 | #else
78 | #define EASTL_ARCH_ATOMIC_STORE_RELAXED_64_AVAILABLE 0
79 | #endif
80 |
81 | #if defined(EASTL_ARCH_ATOMIC_STORE_RELEASE_64)
82 | #define EASTL_ARCH_ATOMIC_STORE_RELEASE_64_AVAILABLE 1
83 | #else
84 | #define EASTL_ARCH_ATOMIC_STORE_RELEASE_64_AVAILABLE 0
85 | #endif
86 |
87 | #if defined(EASTL_ARCH_ATOMIC_STORE_SEQ_CST_64)
88 | #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_64_AVAILABLE 1
89 | #else
90 | #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_64_AVAILABLE 0
91 | #endif
92 |
93 |
94 | #if defined(EASTL_ARCH_ATOMIC_STORE_RELAXED_128)
95 | #define EASTL_ARCH_ATOMIC_STORE_RELAXED_128_AVAILABLE 1
96 | #else
97 | #define EASTL_ARCH_ATOMIC_STORE_RELAXED_128_AVAILABLE 0
98 | #endif
99 |
100 | #if defined(EASTL_ARCH_ATOMIC_STORE_RELEASE_128)
101 | #define EASTL_ARCH_ATOMIC_STORE_RELEASE_128_AVAILABLE 1
102 | #else
103 | #define EASTL_ARCH_ATOMIC_STORE_RELEASE_128_AVAILABLE 0
104 | #endif
105 |
106 | #if defined(EASTL_ARCH_ATOMIC_STORE_SEQ_CST_128)
107 | #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_128_AVAILABLE 1
108 | #else
109 | #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_128_AVAILABLE 0
110 | #endif
111 |
112 |
113 | #endif /* EASTL_ATOMIC_INTERNAL_ARCH_STORE_H */
114 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/arch/arch_thread_fence.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_ARCH_THREAD_FENCE_H
7 | #define EASTL_ATOMIC_INTERNAL_ARCH_THREAD_FENCE_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_ARCH_ATOMIC_THREAD_FENCE_*()
17 | //
18 | #if defined(EASTL_ARCH_ATOMIC_THREAD_FENCE_RELAXED)
19 | #define EASTL_ARCH_ATOMIC_THREAD_FENCE_RELAXED_AVAILABLE 1
20 | #else
21 | #define EASTL_ARCH_ATOMIC_THREAD_FENCE_RELAXED_AVAILABLE 0
22 | #endif
23 |
24 | #if defined(EASTL_ARCH_ATOMIC_THREAD_FENCE_ACQUIRE)
25 | #define EASTL_ARCH_ATOMIC_THREAD_FENCE_ACQUIRE_AVAILABLE 1
26 | #else
27 | #define EASTL_ARCH_ATOMIC_THREAD_FENCE_ACQUIRE_AVAILABLE 0
28 | #endif
29 |
30 | #if defined(EASTL_ARCH_ATOMIC_THREAD_FENCE_RELEASE)
31 | #define EASTL_ARCH_ATOMIC_THREAD_FENCE_RELEASE_AVAILABLE 1
32 | #else
33 | #define EASTL_ARCH_ATOMIC_THREAD_FENCE_RELEASE_AVAILABLE 0
34 | #endif
35 |
36 | #if defined(EASTL_ARCH_ATOMIC_THREAD_FENCE_ACQ_REL)
37 | #define EASTL_ARCH_ATOMIC_THREAD_FENCE_ACQ_REL_AVAILABLE 1
38 | #else
39 | #define EASTL_ARCH_ATOMIC_THREAD_FENCE_ACQ_REL_AVAILABLE 0
40 | #endif
41 |
42 | #if defined(EASTL_ARCH_ATOMIC_THREAD_FENCE_SEQ_CST)
43 | #define EASTL_ARCH_ATOMIC_THREAD_FENCE_SEQ_CST_AVAILABLE 1
44 | #else
45 | #define EASTL_ARCH_ATOMIC_THREAD_FENCE_SEQ_CST_AVAILABLE 0
46 | #endif
47 |
48 |
49 | #endif /* EASTL_ATOMIC_INTERNAL_ARCH_THREAD_FENCE_H */
50 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/arch/arm/arch_arm.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_ARCH_ARM_H
7 | #define EASTL_ATOMIC_INTERNAL_ARCH_ARM_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /**
15 | * NOTE: We use this mapping
16 | *
17 | * ARMv7 Mapping 'trailing sync;':
18 | *
19 | * Load Relaxed : ldr
20 | * Load Acquire : ldr; dmb ish
21 | * Load Seq_Cst : ldr; dmb ish
22 | *
23 | * Store Relaxed : str
24 | * Store Release : dmb ish; str
25 | * Store Seq_Cst : dmb ish; str; dmb ish
26 | *
27 | * Relaxed Fence :
28 | * Acquire Fence : dmb ish
29 | * Release Fence : dmb ish
30 | * Acq_Rel Fence : dmb ish
31 | * Seq_Cst Fence : dmb ish
32 | */
33 |
34 | /**
 35 |  * ARMv7 Mapping 'leading sync;':
36 | *
37 | * Load Relaxed : ldr
38 | * Load Acquire : ldr; dmb ish
39 | * Load Seq_Cst : dmb ish; ldr; dmb ish
40 | *
41 | * Store Relaxed : str
42 | * Store Release : dmb ish; str
 43 |  * Store Seq_Cst   : dmb ish; str
44 | *
45 | * Relaxed Fence :
46 | * Acquire Fence : dmb ish
47 | * Release Fence : dmb ish
48 | * Acq_Rel Fence : dmb ish
49 | * Seq_Cst Fence : dmb ish
50 | */
51 |
52 | /**
53 | * NOTE:
54 | *
 55 |  * On ARM32/64, we use the 'trailing sync;' convention with the stricter load acquire that uses
 56 |  * a dmb instead of a control dependency + isb, one reason being to ensure the IRIW litmus test
 57 |  * is satisfied. See EASTL/atomic.h for further explanation and a deeper dive.
 58 |  *
 59 |  * For ARMv8 we could move to the proper store-release and load-acquire instructions, the RCsc variant.
 60 |  * All ARMv7 approaches work on ARMv8, and this code path is only used with MSVC, which isn't used
 61 |  * heavily. Most ARM code will end up going through clang or gcc, since Microsoft ARM devices
 62 |  * aren't that abundant.
63 | */
64 |
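
Editor's illustrative sketch (not part of the original file): written out directly in GCC/Clang inline assembly, the 'trailing sync;' mapping described above looks roughly like the helpers below. The function names are hypothetical and exist only to mirror the table.

    // Hypothetical helpers mirroring the 'trailing sync;' ARMv7 mapping (sketch only).
    static inline void example_store_seq_cst(int* ptr, int val)
    {
        __asm__ __volatile__ ("dmb ish" ::: "memory"); // order everything before the store
        *(volatile int*)ptr = val;                     // plain str
        __asm__ __volatile__ ("dmb ish" ::: "memory"); // trailing sync
    }

    static inline int example_load_acquire(const int* ptr)
    {
        int ret = *(const volatile int*)ptr;           // plain ldr
        __asm__ __volatile__ ("dmb ish" ::: "memory"); // dmb rather than ctrl-dependency + isb
        return ret;
    }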
65 |
66 | /////////////////////////////////////////////////////////////////////////////////
67 |
68 |
69 | #if defined(EA_COMPILER_MSVC)
70 |
71 | #if EA_PLATFORM_PTR_SIZE == 8
72 | #define EASTL_ARCH_ATOMIC_HAS_128BIT
73 | #endif
74 |
75 | #endif
76 |
77 |
78 | /////////////////////////////////////////////////////////////////////////////////
79 |
80 |
81 | #include "arch_arm_load.h"
82 | #include "arch_arm_store.h"
83 |
84 | #include "arch_arm_memory_barrier.h"
85 |
86 | #include "arch_arm_thread_fence.h"
87 |
88 |
89 | #endif /* EASTL_ATOMIC_INTERNAL_ARCH_ARM_H */
90 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/arch/arm/arch_arm_memory_barrier.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_ARCH_ARM_MEMORY_BARRIER_H
7 | #define EASTL_ATOMIC_INTERNAL_ARCH_ARM_MEMORY_BARRIER_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | #if defined(EA_COMPILER_MSVC) && !defined(EA_COMPILER_CLANG_CL)
15 |
16 | #if defined(EA_PROCESSOR_ARM32)
17 |
18 | #define EASTL_ARM_DMB_ISH _ARM_BARRIER_ISH
19 |
20 | #define EASTL_ARM_DMB_ISHST _ARM_BARRIER_ISHST
21 |
22 | #define EASTL_ARM_DMB_ISHLD _ARM_BARRIER_ISH
23 |
24 | #elif defined(EA_PROCESSOR_ARM64)
25 |
26 | #define EASTL_ARM_DMB_ISH _ARM64_BARRIER_ISH
27 |
28 | #define EASTL_ARM_DMB_ISHST _ARM64_BARRIER_ISHST
29 |
30 | #define EASTL_ARM_DMB_ISHLD _ARM64_BARRIER_ISHLD
31 |
32 | #endif
33 |
34 |
35 | /**
36 | * NOTE:
37 | *
 38 |  * While it makes no sense for a hardware memory barrier not to imply a compiler barrier,
 39 |  * the MSVC docs do not explicitly state that it does, so better to be safe than sorry than to
 40 |  * chase hard-to-find bugs caused by the compiler deciding to reorder things.
41 | */
42 |
43 | #define EASTL_ARCH_ATOMIC_ARM_EMIT_DMB(option) \
44 | EASTL_ATOMIC_COMPILER_BARRIER(); \
45 | __dmb(option); \
46 | EASTL_ATOMIC_COMPILER_BARRIER()
47 |
48 |
49 | #elif defined(EA_COMPILER_GNUC) || defined(__clang__)
50 |
51 | #define EASTL_ARM_DMB_ISH ish
52 |
53 | #define EASTL_ARM_DMB_ISHST ishst
54 |
55 | #if defined(EA_PROCESSOR_ARM32)
56 |
57 | #define EASTL_ARM_DMB_ISHLD ish
58 |
59 | #elif defined(EA_PROCESSOR_ARM64)
60 |
61 | #define EASTL_ARM_DMB_ISHLD ishld
62 |
63 | #endif
64 |
65 |
66 | #define EASTL_ARCH_ATOMIC_ARM_EMIT_DMB(option) \
67 | __asm__ __volatile__ ("dmb " EA_STRINGIFY(option) ::: "memory")
68 |
69 |
70 | #endif
71 |
72 |
73 | /////////////////////////////////////////////////////////////////////////////////
74 | //
75 | // void EASTL_ARCH_ATOMIC_CPU_MB()
76 | //
77 | #define EASTL_ARCH_ATOMIC_CPU_MB() \
78 | EASTL_ARCH_ATOMIC_ARM_EMIT_DMB(EASTL_ARM_DMB_ISH)
79 |
80 |
81 | /////////////////////////////////////////////////////////////////////////////////
82 | //
83 | // void EASTL_ARCH_ATOMIC_CPU_WMB()
84 | //
85 | #define EASTL_ARCH_ATOMIC_CPU_WMB() \
86 | EASTL_ARCH_ATOMIC_ARM_EMIT_DMB(EASTL_ARM_DMB_ISHST)
87 |
88 |
89 | /////////////////////////////////////////////////////////////////////////////////
90 | //
91 | // void EASTL_ARCH_ATOMIC_CPU_RMB()
92 | //
93 | #define EASTL_ARCH_ATOMIC_CPU_RMB() \
94 | EASTL_ARCH_ATOMIC_ARM_EMIT_DMB(EASTL_ARM_DMB_ISHLD)
95 |
96 |
97 | #endif /* EASTL_ATOMIC_INTERNAL_ARCH_ARM_MEMORY_BARRIER_H */
98 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/arch/arm/arch_arm_thread_fence.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_ARCH_ARM_THREAD_FENCE_H
7 | #define EASTL_ATOMIC_INTERNAL_ARCH_ARM_THREAD_FENCE_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_ARCH_ATOMIC_THREAD_FENCE_*()
17 | //
18 | #if defined(EA_COMPILER_MSVC)
19 |
20 | #define EASTL_ARCH_ATOMIC_THREAD_FENCE_RELAXED()
21 |
22 | #define EASTL_ARCH_ATOMIC_THREAD_FENCE_ACQUIRE() \
23 | EASTL_ATOMIC_CPU_MB()
24 |
25 | #define EASTL_ARCH_ATOMIC_THREAD_FENCE_RELEASE() \
26 | EASTL_ATOMIC_CPU_MB()
27 |
28 | #define EASTL_ARCH_ATOMIC_THREAD_FENCE_ACQ_REL() \
29 | EASTL_ATOMIC_CPU_MB()
30 |
31 | #define EASTL_ARCH_ATOMIC_THREAD_FENCE_SEQ_CST() \
32 | EASTL_ATOMIC_CPU_MB()
33 |
34 | #endif
35 |
36 |
37 | #endif /* EASTL_ATOMIC_INTERNAL_ARCH_ARM_THREAD_FENCE_H */
38 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/arch/x86/arch_x86_cmpxchg_strong.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_CMPXCHG_STRONG_H
7 | #define EASTL_ATOMIC_INTERNAL_ARCH_X86_CMPXCHG_STRONG_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_*_*_N(type, bool ret, type * ptr, type * expected, type desired)
17 | //
18 | #if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64))
19 |
20 |
21 | #define EASTL_ARCH_ATOMIC_X86_CMPXCHG_STRONG_128_IMPL(type, ret, ptr, expected, desired) \
22 | { \
23 | /* Compare RDX:RAX with m128. If equal, set ZF and load RCX:RBX into m128. Else, clear ZF and load m128 into RDX:RAX. */ \
24 | __asm__ __volatile__ ("lock; cmpxchg16b %2\n" /* cmpxchg16b sets/clears ZF */ \
25 | "sete %3" /* If ZF == 1, set the return value to 1 */ \
26 | /* Output Operands */ \
27 | : "=a"((EASTL_ATOMIC_TYPE_CAST(uint64_t, (expected)))[0]), "=d"((EASTL_ATOMIC_TYPE_CAST(uint64_t, (expected)))[1]), \
28 | "+m"(*(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(__uint128_t, (ptr)))), \
29 | "=rm"((ret)) \
30 | /* Input Operands */ \
31 | : "b"((EASTL_ATOMIC_TYPE_CAST(uint64_t, &(desired)))[0]), "c"((EASTL_ATOMIC_TYPE_CAST(uint64_t, &(desired)))[1]), \
32 | "a"((EASTL_ATOMIC_TYPE_CAST(uint64_t, (expected)))[0]), "d"((EASTL_ATOMIC_TYPE_CAST(uint64_t, (expected)))[1]) \
33 | /* Clobbers */ \
34 | : "memory", "cc"); \
35 | }
36 |
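
Editor's sketch (not part of the file): for readers less familiar with cmpxchg16b, the macro above has the following plain-C++ semantics. The struct and function names are illustrative only; the real macro performs the whole block as a single atomic lock; cmpxchg16b instruction.

    #include <stdint.h>

    struct uint128_pair { uint64_t lo; uint64_t hi; }; // stand-in for a 16-byte, 16-aligned type

    // What the 128-bit strong compare-exchange computes, expressed without assembly:
    static bool cmpxchg16b_semantics(uint128_pair* ptr, uint128_pair* expected, uint128_pair desired)
    {
        // Performed atomically by the real macro.
        if (ptr->lo == expected->lo && ptr->hi == expected->hi)
        {
            *ptr = desired;     // success: store desired, ZF set, ret = true
            return true;
        }
        *expected = *ptr;       // failure: observed value written back to *expected, ret = false
        return false;
    }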
37 |
38 | #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_128(type, ret, ptr, expected, desired) \
39 | EASTL_ARCH_ATOMIC_X86_CMPXCHG_STRONG_128_IMPL(type, ret, ptr, expected, desired)
40 |
41 | #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_RELAXED_128(type, ret, ptr, expected, desired) \
42 | EASTL_ARCH_ATOMIC_X86_CMPXCHG_STRONG_128_IMPL(type, ret, ptr, expected, desired)
43 |
44 | #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_128(type, ret, ptr, expected, desired) \
45 | EASTL_ARCH_ATOMIC_X86_CMPXCHG_STRONG_128_IMPL(type, ret, ptr, expected, desired)
46 |
47 | #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_128(type, ret, ptr, expected, desired) \
48 | EASTL_ARCH_ATOMIC_X86_CMPXCHG_STRONG_128_IMPL(type, ret, ptr, expected, desired)
49 |
50 | #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_RELAXED_128(type, ret, ptr, expected, desired) \
51 | EASTL_ARCH_ATOMIC_X86_CMPXCHG_STRONG_128_IMPL(type, ret, ptr, expected, desired)
52 |
53 | #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_128(type, ret, ptr, expected, desired) \
54 | EASTL_ARCH_ATOMIC_X86_CMPXCHG_STRONG_128_IMPL(type, ret, ptr, expected, desired)
55 |
56 | #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_RELAXED_128(type, ret, ptr, expected, desired) \
57 | EASTL_ARCH_ATOMIC_X86_CMPXCHG_STRONG_128_IMPL(type, ret, ptr, expected, desired)
58 |
59 | #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_ACQUIRE_128(type, ret, ptr, expected, desired) \
60 | EASTL_ARCH_ATOMIC_X86_CMPXCHG_STRONG_128_IMPL(type, ret, ptr, expected, desired)
61 |
62 | #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_128(type, ret, ptr, expected, desired) \
63 | EASTL_ARCH_ATOMIC_X86_CMPXCHG_STRONG_128_IMPL(type, ret, ptr, expected, desired)
64 |
65 |
66 | #endif
67 |
68 |
69 | #endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_CMPXCHG_STRONG_H */
70 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/arch/x86/arch_x86_cmpxchg_weak.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_CMPXCHG_WEAK_H
7 | #define EASTL_ATOMIC_INTERNAL_ARCH_X86_CMPXCHG_WEAK_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_*_*_N(type, bool ret, type * ptr, type * expected, type desired)
17 | //
18 | #if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64))
19 |
20 |
21 | #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_128(type, ret, ptr, expected, desired) \
22 | EASTL_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_128(type, ret, ptr, expected, desired)
23 |
24 | #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_RELAXED_128(type, ret, ptr, expected, desired) \
25 | EASTL_ATOMIC_CMPXCHG_STRONG_ACQUIRE_RELAXED_128(type, ret, ptr, expected, desired)
26 |
27 | #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_128(type, ret, ptr, expected, desired) \
28 | EASTL_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_128(type, ret, ptr, expected, desired)
29 |
30 | #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_128(type, ret, ptr, expected, desired) \
31 | EASTL_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_128(type, ret, ptr, expected, desired)
32 |
33 | #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_RELAXED_128(type, ret, ptr, expected, desired) \
34 | EASTL_ATOMIC_CMPXCHG_STRONG_ACQ_REL_RELAXED_128(type, ret, ptr, expected, desired)
35 |
36 | #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_128(type, ret, ptr, expected, desired) \
37 | EASTL_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_128(type, ret, ptr, expected, desired)
38 |
39 | #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_RELAXED_128(type, ret, ptr, expected, desired) \
40 | EASTL_ATOMIC_CMPXCHG_STRONG_SEQ_CST_RELAXED_128(type, ret, ptr, expected, desired)
41 |
42 | #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_ACQUIRE_128(type, ret, ptr, expected, desired) \
43 | EASTL_ATOMIC_CMPXCHG_STRONG_SEQ_CST_ACQUIRE_128(type, ret, ptr, expected, desired)
44 |
45 | #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_128(type, ret, ptr, expected, desired) \
46 | EASTL_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_128(type, ret, ptr, expected, desired)
47 |
48 |
49 | #endif
50 |
51 |
52 | #endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_CMPXCHG_WEAK_H */
53 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/arch/x86/arch_x86_exchange.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_EXCHANGE_H
7 | #define EASTL_ATOMIC_INTERNAL_ARCH_X86_EXCHANGE_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_ARCH_ATOMIC_EXCHANGE_*_N(type, type ret, type * ptr, type val)
17 | //
18 | #if defined(EA_COMPILER_MSVC) && defined(EA_PROCESSOR_X86)
19 |
20 |
21 | #define EASTL_ARCH_ATOMIC_X86_EXCHANGE_PRE_COMPUTE_DESIRED(ret, observed, val) \
22 | ret = (val)
23 |
24 |
25 | #define EASTL_ARCH_ATOMIC_EXCHANGE_RELAXED_64(type, ret, ptr, val) \
26 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELAXED, \
27 | EASTL_ARCH_ATOMIC_X86_EXCHANGE_PRE_COMPUTE_DESIRED, \
28 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
29 |
30 | #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQUIRE_64(type, ret, ptr, val) \
31 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQUIRE, \
32 | EASTL_ARCH_ATOMIC_X86_EXCHANGE_PRE_COMPUTE_DESIRED, \
33 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
34 |
35 | #define EASTL_ARCH_ATOMIC_EXCHANGE_RELEASE_64(type, ret, ptr, val) \
36 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELEASE, \
37 | EASTL_ARCH_ATOMIC_X86_EXCHANGE_PRE_COMPUTE_DESIRED, \
38 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
39 |
40 | #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQ_REL_64(type, ret, ptr, val) \
41 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQ_REL, \
42 | EASTL_ARCH_ATOMIC_X86_EXCHANGE_PRE_COMPUTE_DESIRED, \
43 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
44 |
45 | #define EASTL_ARCH_ATOMIC_EXCHANGE_SEQ_CST_64(type, ret, ptr, val) \
46 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, SEQ_CST, \
47 | EASTL_ARCH_ATOMIC_X86_EXCHANGE_PRE_COMPUTE_DESIRED, \
48 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
49 |
50 |
51 | #endif
52 |
53 |
54 | #if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64))
55 |
56 |
57 | #define EASTL_ARCH_ATOMIC_X86_EXCHANGE_128(type, ret, ptr, val, MemoryOrder) \
58 | { \
59 | EASTL_ATOMIC_DEFAULT_INIT(bool, cmpxchgRet); \
60 | /* This is intentionally a non-atomic 128-bit load which may observe shearing. */ \
 61 | 		/* Either we did not observe the current value of *(ptr); then the cmpxchg fails, writes the */ \
 62 | 		/* atomically observed value into ret, and the loop retries. Or the non-atomic load got lucky, */ \
 63 | 		/* the observed value equals the value in *(ptr), and the cmpxchg succeeds; the optimistic non-atomic load pays off. */ \
64 | ret = *(ptr); \
65 | do \
66 | { \
67 | EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_CMPXCHG_STRONG_, MemoryOrder), _128)(type, cmpxchgRet, ptr, &(ret), val); \
68 | } while (!cmpxchgRet); \
69 | }
70 |
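
Editor's sketch (not part of the file): the macro above implements a 128-bit exchange as a compare-exchange loop. The same pattern written against std::atomic, for illustration only, looks like this:

    #include <atomic>

    // Exchange built from compare-exchange: start from a (possibly stale) observed value and CAS until we win.
    template <typename T>
    T exchange_via_cas(std::atomic<T>& obj, T desired)
    {
        T observed = obj.load(std::memory_order_relaxed); // optimistic initial guess
        while (!obj.compare_exchange_strong(observed, desired))
        {
            // On failure, 'observed' was updated to the current value; retry with it.
        }
        return observed; // the value held just before our store
    }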
71 |
72 | #define EASTL_ARCH_ATOMIC_EXCHANGE_RELAXED_128(type, ret, ptr, val) \
73 | EASTL_ARCH_ATOMIC_X86_EXCHANGE_128(type, ret, ptr, val, RELAXED)
74 |
75 | #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQUIRE_128(type, ret, ptr, val) \
76 | EASTL_ARCH_ATOMIC_X86_EXCHANGE_128(type, ret, ptr, val, ACQUIRE)
77 |
78 | #define EASTL_ARCH_ATOMIC_EXCHANGE_RELEASE_128(type, ret, ptr, val) \
79 | EASTL_ARCH_ATOMIC_X86_EXCHANGE_128(type, ret, ptr, val, RELEASE)
80 |
81 | #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQ_REL_128(type, ret, ptr, val) \
82 | EASTL_ARCH_ATOMIC_X86_EXCHANGE_128(type, ret, ptr, val, ACQ_REL)
83 |
84 | #define EASTL_ARCH_ATOMIC_EXCHANGE_SEQ_CST_128(type, ret, ptr, val) \
85 | EASTL_ARCH_ATOMIC_X86_EXCHANGE_128(type, ret, ptr, val, SEQ_CST)
86 |
87 |
88 | #endif
89 |
90 |
91 | #endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_EXCHANGE_H */
92 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/arch/x86/arch_x86_fetch_add.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_ADD_H
7 | #define EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_ADD_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_ARCH_ATOMIC_FETCH_ADD_*_N(type, type ret, type * ptr, type val)
17 | //
18 | #if defined(EA_COMPILER_MSVC) && defined(EA_PROCESSOR_X86)
19 |
20 |
21 | #define EASTL_ARCH_ATOMIC_X86_FETCH_ADD_PRE_COMPUTE_DESIRED(ret, observed, val) \
22 | ret = ((observed) + (val))
23 |
24 |
25 | #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELAXED_64(type, ret, ptr, val) \
26 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELAXED, \
27 | EASTL_ARCH_ATOMIC_X86_FETCH_ADD_PRE_COMPUTE_DESIRED, \
28 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
29 |
30 | #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQUIRE_64(type, ret, ptr, val) \
31 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQUIRE, \
32 | EASTL_ARCH_ATOMIC_X86_FETCH_ADD_PRE_COMPUTE_DESIRED, \
33 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
34 |
35 | #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELEASE_64(type, ret, ptr, val) \
36 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELEASE, \
37 | EASTL_ARCH_ATOMIC_X86_FETCH_ADD_PRE_COMPUTE_DESIRED, \
38 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
39 |
40 | #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQ_REL_64(type, ret, ptr, val) \
41 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQ_REL, \
42 | EASTL_ARCH_ATOMIC_X86_FETCH_ADD_PRE_COMPUTE_DESIRED, \
43 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
44 |
45 | #define EASTL_ARCH_ATOMIC_FETCH_ADD_SEQ_CST_64(type, ret, ptr, val) \
46 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, SEQ_CST, \
47 | EASTL_ARCH_ATOMIC_X86_FETCH_ADD_PRE_COMPUTE_DESIRED, \
48 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
49 |
50 |
51 | #endif
52 |
53 |
54 | #if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64))
55 |
56 |
57 | #define EASTL_ARCH_ATOMIC_X86_FETCH_ADD_PRE_COMPUTE_DESIRED(ret, observed, val) \
58 | ret = ((observed) + (val))
59 |
60 |
61 | #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELAXED_128(type, ret, ptr, val) \
62 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELAXED, \
63 | EASTL_ARCH_ATOMIC_X86_FETCH_ADD_PRE_COMPUTE_DESIRED, \
64 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
65 |
66 | #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQUIRE_128(type, ret, ptr, val) \
67 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQUIRE, \
68 | EASTL_ARCH_ATOMIC_X86_FETCH_ADD_PRE_COMPUTE_DESIRED, \
69 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
70 |
71 | #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELEASE_128(type, ret, ptr, val) \
72 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELEASE, \
73 | EASTL_ARCH_ATOMIC_X86_FETCH_ADD_PRE_COMPUTE_DESIRED, \
74 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
75 |
76 | #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQ_REL_128(type, ret, ptr, val) \
77 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQ_REL, \
78 | EASTL_ARCH_ATOMIC_X86_FETCH_ADD_PRE_COMPUTE_DESIRED, \
79 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
80 |
81 | #define EASTL_ARCH_ATOMIC_FETCH_ADD_SEQ_CST_128(type, ret, ptr, val) \
82 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, SEQ_CST, \
83 | EASTL_ARCH_ATOMIC_X86_FETCH_ADD_PRE_COMPUTE_DESIRED, \
84 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
85 |
86 |
87 | #endif
88 |
89 |
90 | #endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_ADD_H */
91 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/arch/x86/arch_x86_fetch_and.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_AND_H
7 | #define EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_AND_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_ARCH_ATOMIC_FETCH_AND_*_N(type, type ret, type * ptr, type val)
17 | //
18 | #if defined(EA_COMPILER_MSVC) && defined(EA_PROCESSOR_X86)
19 |
20 |
21 | #define EASTL_ARCH_ATOMIC_X86_FETCH_AND_PRE_COMPUTE_DESIRED(ret, observed, val) \
22 | ret = ((observed) & (val))
23 |
24 |
25 | #define EASTL_ARCH_ATOMIC_FETCH_AND_RELAXED_64(type, ret, ptr, val) \
26 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELAXED, \
27 | EASTL_ARCH_ATOMIC_X86_FETCH_AND_PRE_COMPUTE_DESIRED, \
28 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
29 |
30 | #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQUIRE_64(type, ret, ptr, val) \
31 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQUIRE, \
32 | EASTL_ARCH_ATOMIC_X86_FETCH_AND_PRE_COMPUTE_DESIRED, \
33 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
34 |
35 | #define EASTL_ARCH_ATOMIC_FETCH_AND_RELEASE_64(type, ret, ptr, val) \
36 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELEASE, \
37 | EASTL_ARCH_ATOMIC_X86_FETCH_AND_PRE_COMPUTE_DESIRED, \
38 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
39 |
40 | #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQ_REL_64(type, ret, ptr, val) \
41 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQ_REL, \
42 | EASTL_ARCH_ATOMIC_X86_FETCH_AND_PRE_COMPUTE_DESIRED, \
43 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
44 |
45 | #define EASTL_ARCH_ATOMIC_FETCH_AND_SEQ_CST_64(type, ret, ptr, val) \
46 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, SEQ_CST, \
47 | EASTL_ARCH_ATOMIC_X86_FETCH_AND_PRE_COMPUTE_DESIRED, \
48 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
49 |
50 |
51 | #endif
52 |
53 |
54 | #if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64))
55 |
56 |
57 | #define EASTL_ARCH_ATOMIC_X86_FETCH_AND_PRE_COMPUTE_DESIRED(ret, observed, val) \
58 | ret = ((observed) & (val))
59 |
60 |
61 | #define EASTL_ARCH_ATOMIC_FETCH_AND_RELAXED_128(type, ret, ptr, val) \
62 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELAXED, \
63 | EASTL_ARCH_ATOMIC_X86_FETCH_AND_PRE_COMPUTE_DESIRED, \
64 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
65 |
66 | #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQUIRE_128(type, ret, ptr, val) \
67 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQUIRE, \
68 | EASTL_ARCH_ATOMIC_X86_FETCH_AND_PRE_COMPUTE_DESIRED, \
69 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
70 |
71 | #define EASTL_ARCH_ATOMIC_FETCH_AND_RELEASE_128(type, ret, ptr, val) \
72 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELEASE, \
73 | EASTL_ARCH_ATOMIC_X86_FETCH_AND_PRE_COMPUTE_DESIRED, \
74 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
75 |
76 | #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQ_REL_128(type, ret, ptr, val) \
77 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQ_REL, \
78 | EASTL_ARCH_ATOMIC_X86_FETCH_AND_PRE_COMPUTE_DESIRED, \
79 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
80 |
81 | #define EASTL_ARCH_ATOMIC_FETCH_AND_SEQ_CST_128(type, ret, ptr, val) \
82 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, SEQ_CST, \
83 | EASTL_ARCH_ATOMIC_X86_FETCH_AND_PRE_COMPUTE_DESIRED, \
84 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
85 |
86 |
87 | #endif
88 |
89 |
90 | #endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_AND_H */
91 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/arch/x86/arch_x86_fetch_or.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_OR_H
7 | #define EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_OR_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_ARCH_ATOMIC_FETCH_OR_*_N(type, type ret, type * ptr, type val)
17 | //
18 | #if defined(EA_COMPILER_MSVC) && defined(EA_PROCESSOR_X86)
19 |
20 |
21 | #define EASTL_ARCH_ATOMIC_X86_FETCH_OR_PRE_COMPUTE_DESIRED(ret, observed, val) \
22 | ret = ((observed) | (val))
23 |
24 |
25 | #define EASTL_ARCH_ATOMIC_FETCH_OR_RELAXED_64(type, ret, ptr, val) \
26 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELAXED, \
27 | EASTL_ARCH_ATOMIC_X86_FETCH_OR_PRE_COMPUTE_DESIRED, \
28 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
29 |
30 | #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQUIRE_64(type, ret, ptr, val) \
31 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQUIRE, \
32 | EASTL_ARCH_ATOMIC_X86_FETCH_OR_PRE_COMPUTE_DESIRED, \
33 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
34 |
35 | #define EASTL_ARCH_ATOMIC_FETCH_OR_RELEASE_64(type, ret, ptr, val) \
36 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELEASE, \
37 | EASTL_ARCH_ATOMIC_X86_FETCH_OR_PRE_COMPUTE_DESIRED, \
38 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
39 |
40 | #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQ_REL_64(type, ret, ptr, val) \
41 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQ_REL, \
42 | EASTL_ARCH_ATOMIC_X86_FETCH_OR_PRE_COMPUTE_DESIRED, \
43 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
44 |
45 | #define EASTL_ARCH_ATOMIC_FETCH_OR_SEQ_CST_64(type, ret, ptr, val) \
46 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, SEQ_CST, \
47 | EASTL_ARCH_ATOMIC_X86_FETCH_OR_PRE_COMPUTE_DESIRED, \
48 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
49 |
50 |
51 | #endif
52 |
53 |
54 | #if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64))
55 |
56 |
57 | #define EASTL_ARCH_ATOMIC_X86_FETCH_OR_PRE_COMPUTE_DESIRED(ret, observed, val) \
58 | ret = ((observed) | (val))
59 |
60 |
61 | #define EASTL_ARCH_ATOMIC_FETCH_OR_RELAXED_128(type, ret, ptr, val) \
62 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELAXED, \
63 | EASTL_ARCH_ATOMIC_X86_FETCH_OR_PRE_COMPUTE_DESIRED, \
64 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
65 |
66 | #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQUIRE_128(type, ret, ptr, val) \
67 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQUIRE, \
68 | EASTL_ARCH_ATOMIC_X86_FETCH_OR_PRE_COMPUTE_DESIRED, \
69 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
70 |
71 | #define EASTL_ARCH_ATOMIC_FETCH_OR_RELEASE_128(type, ret, ptr, val) \
72 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELEASE, \
73 | EASTL_ARCH_ATOMIC_X86_FETCH_OR_PRE_COMPUTE_DESIRED, \
74 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
75 |
76 | #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQ_REL_128(type, ret, ptr, val) \
77 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQ_REL, \
78 | EASTL_ARCH_ATOMIC_X86_FETCH_OR_PRE_COMPUTE_DESIRED, \
79 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
80 |
81 | #define EASTL_ARCH_ATOMIC_FETCH_OR_SEQ_CST_128(type, ret, ptr, val) \
82 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, SEQ_CST, \
83 | EASTL_ARCH_ATOMIC_X86_FETCH_OR_PRE_COMPUTE_DESIRED, \
84 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
85 |
86 |
87 | #endif
88 |
89 |
90 | #endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_OR_H */
91 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/arch/x86/arch_x86_fetch_sub.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_SUB_H
7 | #define EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_SUB_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_ARCH_ATOMIC_FETCH_SUB_*_N(type, type ret, type * ptr, type val)
17 | //
18 | #if defined(EA_COMPILER_MSVC) && defined(EA_PROCESSOR_X86)
19 |
20 |
21 | #define EASTL_ARCH_ATOMIC_X86_FETCH_SUB_PRE_COMPUTE_DESIRED(ret, observed, val) \
22 | ret = ((observed) - (val))
23 |
24 |
25 | #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELAXED_64(type, ret, ptr, val) \
26 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELAXED, \
27 | EASTL_ARCH_ATOMIC_X86_FETCH_SUB_PRE_COMPUTE_DESIRED, \
28 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
29 |
30 | #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQUIRE_64(type, ret, ptr, val) \
31 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQUIRE, \
32 | EASTL_ARCH_ATOMIC_X86_FETCH_SUB_PRE_COMPUTE_DESIRED, \
33 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
34 |
35 | #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELEASE_64(type, ret, ptr, val) \
36 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELEASE, \
37 | EASTL_ARCH_ATOMIC_X86_FETCH_SUB_PRE_COMPUTE_DESIRED, \
38 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
39 |
40 | #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQ_REL_64(type, ret, ptr, val) \
41 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQ_REL, \
42 | EASTL_ARCH_ATOMIC_X86_FETCH_SUB_PRE_COMPUTE_DESIRED, \
43 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
44 |
45 | #define EASTL_ARCH_ATOMIC_FETCH_SUB_SEQ_CST_64(type, ret, ptr, val) \
46 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, SEQ_CST, \
47 | EASTL_ARCH_ATOMIC_X86_FETCH_SUB_PRE_COMPUTE_DESIRED, \
48 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
49 |
50 |
51 | #endif
52 |
53 |
54 | #if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64))
55 |
56 |
57 | #define EASTL_ARCH_ATOMIC_X86_FETCH_SUB_PRE_COMPUTE_DESIRED(ret, observed, val) \
58 | ret = ((observed) - (val))
59 |
60 |
61 | #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELAXED_128(type, ret, ptr, val) \
62 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELAXED, \
63 | EASTL_ARCH_ATOMIC_X86_FETCH_SUB_PRE_COMPUTE_DESIRED, \
64 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
65 |
66 | #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQUIRE_128(type, ret, ptr, val) \
67 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQUIRE, \
68 | EASTL_ARCH_ATOMIC_X86_FETCH_SUB_PRE_COMPUTE_DESIRED, \
69 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
70 |
71 | #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELEASE_128(type, ret, ptr, val) \
72 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELEASE, \
73 | EASTL_ARCH_ATOMIC_X86_FETCH_SUB_PRE_COMPUTE_DESIRED, \
74 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
75 |
76 | #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQ_REL_128(type, ret, ptr, val) \
77 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQ_REL, \
78 | EASTL_ARCH_ATOMIC_X86_FETCH_SUB_PRE_COMPUTE_DESIRED, \
79 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
80 |
81 | #define EASTL_ARCH_ATOMIC_FETCH_SUB_SEQ_CST_128(type, ret, ptr, val) \
82 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, SEQ_CST, \
83 | EASTL_ARCH_ATOMIC_X86_FETCH_SUB_PRE_COMPUTE_DESIRED, \
84 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
85 |
86 |
87 | #endif
88 |
89 |
90 | #endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_SUB_H */
91 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/arch/x86/arch_x86_fetch_xor.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_XOR_H
7 | #define EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_XOR_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_ARCH_ATOMIC_FETCH_XOR_*_N(type, type ret, type * ptr, type val)
17 | //
18 | #if defined(EA_COMPILER_MSVC) && defined(EA_PROCESSOR_X86)
19 |
20 |
21 | #define EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED(ret, observed, val) \
22 | ret = ((observed) ^ (val))
23 |
24 |
25 | #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELAXED_64(type, ret, ptr, val) \
26 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELAXED, \
27 | EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \
28 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
29 |
30 | #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQUIRE_64(type, ret, ptr, val) \
31 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQUIRE, \
32 | EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \
33 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
34 |
35 | #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELEASE_64(type, ret, ptr, val) \
36 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELEASE, \
37 | EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \
38 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
39 |
40 | #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQ_REL_64(type, ret, ptr, val) \
41 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQ_REL, \
42 | EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \
43 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
44 |
45 | #define EASTL_ARCH_ATOMIC_FETCH_XOR_SEQ_CST_64(type, ret, ptr, val) \
46 | EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, SEQ_CST, \
47 | EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \
48 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
49 |
50 |
51 | #endif
52 |
53 |
54 | #if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64))
55 |
56 |
57 | #define EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED(ret, observed, val) \
58 | ret = ((observed) ^ (val))
59 |
60 |
61 | #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELAXED_128(type, ret, ptr, val) \
62 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELAXED, \
63 | EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \
64 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
65 |
66 | #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQUIRE_128(type, ret, ptr, val) \
67 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQUIRE, \
68 | EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \
69 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
70 |
71 | #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELEASE_128(type, ret, ptr, val) \
72 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELEASE, \
73 | EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \
74 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
75 |
76 | #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQ_REL_128(type, ret, ptr, val) \
77 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQ_REL, \
78 | EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \
79 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
80 |
81 | #define EASTL_ARCH_ATOMIC_FETCH_XOR_SEQ_CST_128(type, ret, ptr, val) \
82 | EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, SEQ_CST, \
83 | EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \
84 | EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)
85 |
86 |
87 | #endif
88 |
89 |
90 | #endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_XOR_H */
91 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/arch/x86/arch_x86_memory_barrier.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_MEMORY_BARRIER_H
7 | #define EASTL_ATOMIC_INTERNAL_ARCH_X86_MEMORY_BARRIER_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 |
15 | /////////////////////////////////////////////////////////////////////////////////
16 | //
17 | // void EASTL_ARCH_ATOMIC_CPU_MB()
18 | //
19 | #if defined(EA_COMPILER_MSVC)
20 |
21 | /**
22 | * NOTE:
 23 |  * While it makes no sense for a hardware memory barrier not to imply a compiler barrier,
 24 |  * the MSVC docs do not explicitly state that it does, so better to be safe than sorry than to
 25 |  * chase hard-to-find bugs caused by the compiler deciding to reorder things.
26 | */
27 |
28 | #if 1
29 |
30 | // 4459 : declaration of 'identifier' hides global declaration
31 | // 4456 : declaration of 'identifier' hides previous local declaration
32 | #define EASTL_ARCH_ATOMIC_CPU_MB() \
33 | { \
34 | EA_DISABLE_VC_WARNING(4459 4456); \
35 | volatile long _; \
36 | _InterlockedExchangeAdd(&_, 0); \
37 | EA_RESTORE_VC_WARNING(); \
38 | }
39 |
40 | #else
41 |
42 | #define EASTL_ARCH_ATOMIC_CPU_MB() \
43 | EASTL_ATOMIC_COMPILER_BARRIER(); \
44 | _mm_mfence(); \
45 | EASTL_ATOMIC_COMPILER_BARRIER()
46 |
47 | #endif
48 |
49 | #elif defined(__clang__) || defined(EA_COMPILER_GNUC)
50 |
51 | /**
52 | * NOTE:
53 | *
54 | * mfence orders all loads/stores to/from all memory types.
 55 |  * We only care about ordinary cacheable memory, so a lighter-weight locked instruction
 56 |  * is far faster than an mfence for obtaining a full memory barrier.
 57 |  * A lock; addl against the top of the stack is good because:
 58 |  *   it is distinct for every thread, which prevents false sharing
59 | * that cacheline is most likely cache hot
60 | *
61 | * We intentionally do it below the stack pointer to avoid false RAW register dependencies,
 62 |  * in cases where the compiler reads from the stack pointer after the lock; addl instruction.
63 | *
64 | * Accounting for Red Zones or Cachelines doesn't provide extra benefit.
65 | */
66 |
67 | #if defined(EA_PROCESSOR_X86)
68 |
69 | #define EASTL_ARCH_ATOMIC_CPU_MB() \
70 | __asm__ __volatile__ ("lock; addl $0, -4(%%esp)" ::: "memory", "cc")
71 |
72 | #elif defined(EA_PROCESSOR_X86_64)
73 |
74 | #define EASTL_ARCH_ATOMIC_CPU_MB() \
75 | __asm__ __volatile__ ("lock; addl $0, -8(%%rsp)" ::: "memory", "cc")
76 |
77 | #else
78 |
79 | #define EASTL_ARCH_ATOMIC_CPU_MB() \
80 | __asm__ __volatile__ ("mfence" ::: "memory")
81 |
82 | #endif
83 |
84 |
85 | #endif
86 |
87 |
88 | /////////////////////////////////////////////////////////////////////////////////
89 | //
90 | // void EASTL_ARCH_ATOMIC_CPU_WMB()
91 | //
92 | #define EASTL_ARCH_ATOMIC_CPU_WMB() \
93 | EASTL_ATOMIC_COMPILER_BARRIER()
94 |
95 |
96 | /////////////////////////////////////////////////////////////////////////////////
97 | //
98 | // void EASTL_ARCH_ATOMIC_CPU_RMB()
99 | //
100 | #define EASTL_ARCH_ATOMIC_CPU_RMB() \
101 | EASTL_ATOMIC_COMPILER_BARRIER()
102 |
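
Editor's illustrative sketch (not part of the file): on x86's TSO model, ordinary cacheable loads are not reordered with other loads and stores are not reordered with other stores, which is why CPU_WMB()/CPU_RMB() above only need to stop the compiler. The full barrier exists for the one hardware reordering x86 does allow, a store passing a later load, as in the classic store-buffer litmus test below. The helper and variable names are hypothetical.

    static volatile int gX = 0, gY = 0;

    static inline void full_barrier() // mirrors the GCC/Clang x86-64 CPU_MB() definition above
    {
        __asm__ __volatile__ ("lock; addl $0, -8(%%rsp)" ::: "memory", "cc");
    }

    static int thread1() { gX = 1; full_barrier(); return gY; }
    static int thread2() { gY = 1; full_barrier(); return gX; }
    // Run concurrently: without full_barrier() both calls may return 0; with it, at least one returns 1.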
103 |
104 | #endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_MEMORY_BARRIER_H */
105 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/arch/x86/arch_x86_thread_fence.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_THREAD_FENCE_H
7 | #define EASTL_ATOMIC_INTERNAL_ARCH_X86_THREAD_FENCE_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_ARCH_ATOMIC_THREAD_FENCE_*()
17 | //
18 | #if defined(EA_COMPILER_MSVC)
19 |
20 | #define EASTL_ARCH_ATOMIC_THREAD_FENCE_RELAXED()
21 |
22 | #define EASTL_ARCH_ATOMIC_THREAD_FENCE_ACQUIRE() \
23 | EASTL_ATOMIC_COMPILER_BARRIER()
24 |
25 | #define EASTL_ARCH_ATOMIC_THREAD_FENCE_RELEASE() \
26 | EASTL_ATOMIC_COMPILER_BARRIER()
27 |
28 | #define EASTL_ARCH_ATOMIC_THREAD_FENCE_ACQ_REL() \
29 | EASTL_ATOMIC_COMPILER_BARRIER()
30 |
31 | #endif
32 |
33 |
34 | #if defined(EA_COMPILER_MSVC) || defined(__clang__) || defined(EA_COMPILER_GNUC)
35 |
36 | #define EASTL_ARCH_ATOMIC_THREAD_FENCE_SEQ_CST() \
37 | EASTL_ATOMIC_CPU_MB()
38 |
39 | #endif
40 |
41 |
42 | #endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_THREAD_FENCE_H */
43 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/atomic_asserts.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_STATIC_ASSERTS_H
7 | #define EASTL_ATOMIC_INTERNAL_STATIC_ASSERTS_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
 14 | #define EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(type) \
 15 | 	static_assert(!eastl::is_same<type, type>::value, "eastl::atomic : volatile eastl::atomic is not what you expect! Read the docs in EASTL/atomic.h! Use the memory orders to access the atomic object!");
 16 | 
 17 | #define EASTL_ATOMIC_STATIC_ASSERT_INVALID_MEMORY_ORDER(type) \
 18 | 	static_assert(!eastl::is_same<type, type>::value, "eastl::atomic : invalid memory order for the given operation!");
 19 | 
 20 | #define EASTL_ATOMIC_STATIC_ASSERT_TYPE(type) \
 21 | 	/* User Provided T must not be cv qualified */ \
 22 | 	static_assert(!eastl::is_const<type>::value, "eastl::atomic : Template Typename T cannot be const!"); \
 23 | 	static_assert(!eastl::is_volatile<type>::value, "eastl::atomic : Template Typename T cannot be volatile! Use the memory orders to access the underlying type for the guarantees you need."); \
 24 | 	/* T must satisfy StandardLayoutType */ \
 25 | 	static_assert(eastl::is_standard_layout<type>::value, "eastl::atomic : Must have standard layout!"); \
 26 | 	/* T must be TriviallyCopyable but it does not have to be TriviallyConstructible */ \
 27 | 	static_assert(eastl::is_trivially_copyable<type>::value, "eastl::atomic : Template Typename T must be trivially copyable!"); \
 28 | 	static_assert(eastl::is_copy_constructible<type>::value, "eastl::atomic : Template Typename T must be copy constructible!"); \
 29 | 	static_assert(eastl::is_move_constructible<type>::value, "eastl::atomic : Template Typename T must be move constructible!"); \
 30 | 	static_assert(eastl::is_copy_assignable<type>::value, "eastl::atomic : Template Typename T must be copy assignable!"); \
 31 | 	static_assert(eastl::is_move_assignable<type>::value, "eastl::atomic : Template Typename T must be move assignable!"); \
 32 | 	static_assert(eastl::is_trivially_destructible<type>::value, "eastl::atomic : Must be trivially destructible!"); \
 33 | 	static_assert(eastl::internal::is_atomic_lockfree_size<type>::value, "eastl::atomic : Template Typename T must be a lockfree size!");
 34 | 
 35 | #define EASTL_ATOMIC_STATIC_ASSERT_TYPE_IS_OBJECT(type) \
 36 | 	static_assert(eastl::is_object<type>::value, "eastl::atomic : Template Typename T must be an object type!");
 37 | 
 38 | #define EASTL_ATOMIC_ASSERT_ALIGNED(alignment) \
 39 | 	EASTL_ASSERT((alignment & (alignment - 1)) == 0); \
 40 | 	EASTL_ASSERT((reinterpret_cast<size_t>(this) & (alignment - 1)) == 0)
41 |
42 |
43 | namespace eastl
44 | {
45 |
46 |
47 | namespace internal
48 | {
49 |
50 |
 51 | 	template <typename T>
52 | struct atomic_invalid_type
53 | {
54 | /**
55 | * class Test { int i; int j; int k; }; sizeof(Test) == 96 bits
56 | *
57 | * std::atomic allows non-primitive types to be used for the template type.
 58 | 	 * This causes the API to degrade to locking for types that cannot fit into the lockfree size
 59 | 	 * of the target platform, such as std::atomic<Test>, leading to performance traps.
 60 | 	 *
 61 | 	 * If this static_assert() fires, it means your template type T is larger than any atomic instruction
 62 | 	 * supported on the given platform.
 63 | 	 */
 64 | 
 65 | 		static_assert(!eastl::is_same<T, T>::value, "eastl::atomic : invalid template type T!");
66 | };
67 |
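
Editor's sketch of what the requirements above mean in practice (not part of the file; it assumes, as the asserts imply, that the lock-free widths are the power-of-two sizes the platform's atomic instructions support):

    struct Counter { int value; };        // 4 bytes: trivially copyable, standard layout, and a
                                          // supported atomic width, so eastl::atomic<Counter> compiles.

    struct Test { int i; int j; int k; }; // 12 bytes (the 96-bit example above): still trivially
                                          // copyable, but not a supported lock-free width, so
                                          // eastl::atomic<Test> fails the is_atomic_lockfree_size
                                          // assert instead of silently degrading to a lock the way
                                          // std::atomic<Test> may.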
68 |
69 | } // namespace internal
70 |
71 |
72 | } // namespace eastl
73 |
74 |
75 | #endif /* EASTL_ATOMIC_INTERNAL_STATIC_ASSERTS_H */
76 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/atomic_flag_standalone.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_FLAG_STANDALONE_H
7 | #define EASTL_ATOMIC_INTERNAL_FLAG_STANDALONE_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | namespace eastl
15 | {
16 |
17 |
18 | ////////////////////////////////////////////////////////////////////////////////
19 | //
20 | // bool atomic_flag_test_and_set(eastl::atomic_flag*)
21 | //
22 | EASTL_FORCE_INLINE bool atomic_flag_test_and_set(eastl::atomic_flag* atomicObj) EA_NOEXCEPT
23 | {
24 | return atomicObj->test_and_set();
25 | }
26 |
27 | template <typename Order>
28 | EASTL_FORCE_INLINE bool atomic_flag_test_and_set_explicit(eastl::atomic_flag* atomicObj, Order order)
29 | {
30 | return atomicObj->test_and_set(order);
31 | }
32 |
33 |
34 | ////////////////////////////////////////////////////////////////////////////////
35 | //
36 | // void atomic_flag_clear(eastl::atomic_flag*)
37 | //
38 | EASTL_FORCE_INLINE void atomic_flag_clear(eastl::atomic_flag* atomicObj)
39 | {
40 | atomicObj->clear();
41 | }
42 |
43 | template <typename Order>
44 | EASTL_FORCE_INLINE void atomic_flag_clear_explicit(eastl::atomic_flag* atomicObj, Order order)
45 | {
46 | atomicObj->clear(order);
47 | }
48 |
49 |
50 | ////////////////////////////////////////////////////////////////////////////////
51 | //
52 | // bool atomic_flag_test(eastl::atomic_flag*)
53 | //
54 | EASTL_FORCE_INLINE bool atomic_flag_test(eastl::atomic_flag* atomicObj)
55 | {
56 | return atomicObj->test();
57 | }
58 |
59 | template <typename Order>
60 | EASTL_FORCE_INLINE bool atomic_flag_test_explicit(eastl::atomic_flag* atomicObj, Order order)
61 | {
62 | return atomicObj->test(order);
63 | }
64 |
65 |
66 | } // namespace eastl
67 |
68 |
69 | #endif /* EASTL_ATOMIC_INTERNAL_FLAG_STANDALONE_H */
70 |
--------------------------------------------------------------------------------
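As a usage sketch (assumed, not taken from the repository), the standalone functions above can drive a simple spin lock built on eastl::atomic_flag; the name gLock and the critical-section contents are illustrative.

#include <EASTL/atomic.h>

eastl::atomic_flag gLock; // default-constructed in the clear state

void WithLock()
{
	// Acquire: spin until the previous value was clear.
	while (eastl::atomic_flag_test_and_set_explicit(&gLock, eastl::memory_order_acquire))
	{
		// busy-wait
	}

	// ... critical section ...

	// Release: make the writes above visible before clearing the flag.
	eastl::atomic_flag_clear_explicit(&gLock, eastl::memory_order_release);
}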
/include/EASTL/internal/atomic/atomic_macros.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_MACROS_H
7 | #define EASTL_ATOMIC_INTERNAL_MACROS_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // The reason for the implementation separating out into a compiler and architecture
17 | // folder is as follows.
18 | //
19 | // The compiler directory is meant to implement atomics using the compiler provided
20 | // intrinsics. This also implies that usually the same compiler intrinsic implementation
21 | // can be used for any architecture the compiler supports. If a compiler provides intrinsics
22 | // to support barriers or atomic operations, then that implementation should be in the
23 | // compiler directory.
24 | //
25 | // The arch directory is meant to manually implement atomics for a specific architecture
26 | // such as power or x86. There may be some compiler-specific code in this directory because,
27 | // for example, GCC's inline assembly syntax differs from that of other compilers.
28 | //
29 | // The arch directory can also be used to implement some atomic operations ourselves
30 | // if we deem the compiler provided implementation to be inefficient for the given
31 | // architecture or we need to do some things manually for a given compiler.
32 | //
33 | // The atomic_macros directory implements the macros that the rest of the atomic
34 | // library uses. These macros will expand to either the compiler or arch implemented
35 | // macro. The arch implemented macro is given priority over the compiler implemented
36 | // macro if both are implemented; otherwise whichever one is implemented is chosen, or
37 | // an error is emitted if neither is implemented.
38 | //
39 | // The implementation being all macros has a couple nice side effects as well.
40 | //
41 | // 1. All the implementation ends up funneling into one low level macro implementation
42 | // which makes it easy to verify correctness, reduce copy-paste errors and differences
43 | // in various platform implementations.
44 | //
45 | // 2. Allows for the implementation to be implemented efficiently on compilers that do not
46 | // directly implement the C++ memory model in their intrinsics such as msvc.
47 | //
48 | // 3. Allows for the implementation of atomics that may not be supported on the given platform,
49 | // such as 128-bit atomics on 32-bit platforms since the macros will only ever be expanded
50 | // on platforms that support said features. This makes implementing said features pretty easy
51 | // since we do not have to worry about complicated feature detection in the low level implementations.
52 | //
53 | // The macro implementation may assume that all passed-in types are trivially constructible and thus
54 | // is free to create local variables of the passed-in types as it pleases.
55 | // It may also assume that all passed-in types are trivially copyable.
56 | // It cannot assume a passed-in type is any particular type; if a specific type is needed, it must do an
57 | // EASTL_ATOMIC_TYPE_PUN_CAST() to the required type.
58 | //
59 |
60 |
61 | #include "compiler/compiler.h"
62 | #include "arch/arch.h"
63 |
64 | #include "atomic_macros/atomic_macros.h"
65 |
66 |
67 | #endif /* EASTL_ATOMIC_INTERNAL_MACROS_H */
68 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/atomic_macros/atomic_macros_base.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_MACROS_BASE_H
7 | #define EASTL_ATOMIC_INTERNAL_MACROS_BASE_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | #define EASTL_ATOMIC_INTERNAL_COMPILER_AVAILABLE(op) \
15 | EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_COMPILER_, op), _AVAILABLE)
16 |
17 | #define EASTL_ATOMIC_INTERNAL_ARCH_AVAILABLE(op) \
18 | EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_ARCH_, op), _AVAILABLE)
19 |
20 |
21 | // We can't just use static_assert(false, ...) here, since on MSVC 17.10
22 | // the /Zc:static_assert flag makes non-dependent static_asserts in the body of a template
23 | // be evaluated at template-parse time, rather than at template instantiation time.
24 | // So instead we just make the assert dependent on the type.
25 | #define EASTL_ATOMIC_INTERNAL_NOT_IMPLEMENTED_ERROR(...) \
26 | static_assert(!eastl::is_same_v<T, T>, "eastl::atomic<T> atomic macro not implemented!")
27 |
28 |
29 | /* Compiler && Arch Not Implemented */
30 | #define EASTL_ATOMIC_INTERNAL_OP_PATTERN_00(op) \
31 | EASTL_ATOMIC_INTERNAL_NOT_IMPLEMENTED_ERROR
32 |
33 | /* Arch Implemented */
34 | #define EASTL_ATOMIC_INTERNAL_OP_PATTERN_01(op) \
35 | EA_PREPROCESSOR_JOIN(EASTL_ARCH_, op)
36 |
37 | /* Compiler Implemented */
38 | #define EASTL_ATOMIC_INTERNAL_OP_PATTERN_10(op) \
39 | EA_PREPROCESSOR_JOIN(EASTL_COMPILER_, op)
40 |
41 | /* Compiler && Arch Implemented */
42 | #define EASTL_ATOMIC_INTERNAL_OP_PATTERN_11(op) \
43 | EA_PREPROCESSOR_JOIN(EASTL_ARCH_, op)
44 |
45 |
46 | /* This macro selects one of the pattern macros above based on the 2x2 True-False truth table */
47 | #define EASTL_ATOMIC_INTERNAL_OP_HELPER1(compiler, arch, op) \
48 | EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_INTERNAL_OP_PATTERN_, EA_PREPROCESSOR_JOIN(compiler, arch))(op)
49 |
50 |
51 | /////////////////////////////////////////////////////////////////////////////////
52 | //
53 | // EASTL_ATOMIC_CHOOSE_OP_IMPL
54 | //
55 | // This macro chooses between the compiler or architecture implementation for a
56 | // given atomic operation.
57 | //
58 | // USAGE:
59 | //
60 | // EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_FETCH_ADD_RELAXED_8)(ret, ptr, val)
61 | //
62 | #define EASTL_ATOMIC_CHOOSE_OP_IMPL(op) \
63 | EASTL_ATOMIC_INTERNAL_OP_HELPER1( \
64 | EASTL_ATOMIC_INTERNAL_COMPILER_AVAILABLE(op), \
65 | EASTL_ATOMIC_INTERNAL_ARCH_AVAILABLE(op), \
66 | op \
67 | )
68 |
69 |
70 | #endif /* EASTL_ATOMIC_INTERNAL_MACROS_BASE_H */
71 |
--------------------------------------------------------------------------------
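To illustrate the selection mechanism above, here is a hand-worked expansion (an assumed scenario: the compiler backend defines EASTL_COMPILER_ATOMIC_FETCH_ADD_RELAXED_8 and no arch override exists, so the availability flags are 1 and 0 respectively; ret, ptr and val are placeholder names).

EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_FETCH_ADD_RELAXED_8)(ret, ptr, val)
  // the two availability flags expand first:
  -> EASTL_ATOMIC_INTERNAL_OP_HELPER1(1, 0, ATOMIC_FETCH_ADD_RELAXED_8)(ret, ptr, val)
  // 1 and 0 are token-pasted to pick the pattern macro:
  -> EASTL_ATOMIC_INTERNAL_OP_PATTERN_10(ATOMIC_FETCH_ADD_RELAXED_8)(ret, ptr, val)
  // pattern 10 means "compiler implemented", so EASTL_COMPILER_ is prepended:
  -> EASTL_COMPILER_ATOMIC_FETCH_ADD_RELAXED_8(ret, ptr, val)

Had both implementations been available (pattern 11), the arch macro EASTL_ARCH_ATOMIC_FETCH_ADD_RELAXED_8 would be chosen instead; had neither (pattern 00), the type-dependent static_assert in EASTL_ATOMIC_INTERNAL_NOT_IMPLEMENTED_ERROR would fire at instantiation.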
/include/EASTL/internal/atomic/atomic_macros/atomic_macros_compiler_barrier.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_MACROS_COMPILER_BARRIER_H
7 | #define EASTL_ATOMIC_INTERNAL_MACROS_COMPILER_BARRIER_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_ATOMIC_COMPILER_BARRIER()
17 | //
18 | #define EASTL_ATOMIC_COMPILER_BARRIER() \
19 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_COMPILER_BARRIER)()
20 |
21 |
22 | /////////////////////////////////////////////////////////////////////////////////
23 | //
24 | // void EASTL_ATOMIC_COMPILER_BARRIER_DATA_DEPENDENCY(const T&, type)
25 | //
26 | #define EASTL_ATOMIC_COMPILER_BARRIER_DATA_DEPENDENCY(val, type) \
27 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_COMPILER_BARRIER_DATA_DEPENDENCY)(val, type)
28 |
29 |
30 | #endif /* EASTL_ATOMIC_INTERNAL_MACROS_COMPILER_BARRIER_H */
31 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/atomic_macros/atomic_macros_cpu_pause.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_MACROS_CPU_PAUSE_H
7 | #define EASTL_ATOMIC_INTERNAL_MACROS_CPU_PAUSE_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_ATOMIC_CPU_PAUSE()
17 | //
18 | #define EASTL_ATOMIC_CPU_PAUSE() \
19 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_CPU_PAUSE)()
20 |
21 |
22 | #endif /* EASTL_ATOMIC_INTERNAL_MACROS_CPU_PAUSE_H */
23 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/atomic_macros/atomic_macros_load.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_MACROS_LOAD_H
7 | #define EASTL_ATOMIC_INTERNAL_MACROS_LOAD_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_ATOMIC_LOAD_*_N(type, type ret, type * ptr)
17 | //
18 | #define EASTL_ATOMIC_LOAD_RELAXED_8(type, ret, ptr) \
19 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_LOAD_RELAXED_8)(type, ret, ptr)
20 |
21 | #define EASTL_ATOMIC_LOAD_ACQUIRE_8(type, ret, ptr) \
22 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_LOAD_ACQUIRE_8)(type, ret, ptr)
23 |
24 | #define EASTL_ATOMIC_LOAD_SEQ_CST_8(type, ret, ptr) \
25 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_LOAD_SEQ_CST_8)(type, ret, ptr)
26 |
27 |
28 | #define EASTL_ATOMIC_LOAD_RELAXED_16(type, ret, ptr) \
29 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_LOAD_RELAXED_16)(type, ret, ptr)
30 |
31 | #define EASTL_ATOMIC_LOAD_ACQUIRE_16(type, ret, ptr) \
32 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_LOAD_ACQUIRE_16)(type, ret, ptr)
33 |
34 | #define EASTL_ATOMIC_LOAD_SEQ_CST_16(type, ret, ptr) \
35 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_LOAD_SEQ_CST_16)(type, ret, ptr)
36 |
37 |
38 | #define EASTL_ATOMIC_LOAD_RELAXED_32(type, ret, ptr) \
39 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_LOAD_RELAXED_32)(type, ret, ptr)
40 |
41 | #define EASTL_ATOMIC_LOAD_ACQUIRE_32(type, ret, ptr) \
42 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_LOAD_ACQUIRE_32)(type, ret, ptr)
43 |
44 | #define EASTL_ATOMIC_LOAD_SEQ_CST_32(type, ret, ptr) \
45 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_LOAD_SEQ_CST_32)(type, ret, ptr)
46 |
47 |
48 | #define EASTL_ATOMIC_LOAD_RELAXED_64(type, ret, ptr) \
49 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_LOAD_RELAXED_64)(type, ret, ptr)
50 |
51 | #define EASTL_ATOMIC_LOAD_ACQUIRE_64(type, ret, ptr) \
52 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_LOAD_ACQUIRE_64)(type, ret, ptr)
53 |
54 | #define EASTL_ATOMIC_LOAD_SEQ_CST_64(type, ret, ptr) \
55 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_LOAD_SEQ_CST_64)(type, ret, ptr)
56 |
57 |
58 | #define EASTL_ATOMIC_LOAD_RELAXED_128(type, ret, ptr) \
59 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_LOAD_RELAXED_128)(type, ret, ptr)
60 |
61 | #define EASTL_ATOMIC_LOAD_ACQUIRE_128(type, ret, ptr) \
62 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_LOAD_ACQUIRE_128)(type, ret, ptr)
63 |
64 | #define EASTL_ATOMIC_LOAD_SEQ_CST_128(type, ret, ptr) \
65 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_LOAD_SEQ_CST_128)(type, ret, ptr)
66 |
67 |
68 | #define EASTL_ATOMIC_LOAD_READ_DEPENDS_32(type, ret, ptr) \
69 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_LOAD_READ_DEPENDS_32)(type, ret, ptr)
70 |
71 | #define EASTL_ATOMIC_LOAD_READ_DEPENDS_64(type, ret, ptr) \
72 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_LOAD_READ_DEPENDS_64)(type, ret, ptr)
73 |
74 |
75 | #endif /* EASTL_ATOMIC_INTERNAL_MACROS_LOAD_H */
76 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/atomic_macros/atomic_macros_memory_barrier.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_MACROS_MEMORY_BARRIER_H
7 | #define EASTL_ATOMIC_INTERNAL_MACROS_MEMORY_BARRIER_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_ATOMIC_CPU_MB()
17 | //
18 | #define EASTL_ATOMIC_CPU_MB() \
19 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_CPU_MB)()
20 |
21 |
22 | /////////////////////////////////////////////////////////////////////////////////
23 | //
24 | // void EASTL_ATOMIC_CPU_WMB()
25 | //
26 | #define EASTL_ATOMIC_CPU_WMB() \
27 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_CPU_WMB)()
28 |
29 |
30 | /////////////////////////////////////////////////////////////////////////////////
31 | //
32 | // void EASTL_ATOMIC_CPU_RMB()
33 | //
34 | #define EASTL_ATOMIC_CPU_RMB() \
35 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_CPU_RMB)()
36 |
37 |
38 | #endif /* EASTL_ATOMIC_INTERNAL_MACROS_MEMORY_BARRIER_H */
39 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/atomic_macros/atomic_macros_signal_fence.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_MACROS_SIGNAL_FENCE_H
7 | #define EASTL_ATOMIC_INTERNAL_MACROS_SIGNAL_FENCE_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_ATOMIC_SIGNAL_FENCE_*()
17 | //
18 | #define EASTL_ATOMIC_SIGNAL_FENCE_RELAXED() \
19 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_SIGNAL_FENCE_RELAXED)()
20 |
21 | #define EASTL_ATOMIC_SIGNAL_FENCE_ACQUIRE() \
22 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_SIGNAL_FENCE_ACQUIRE)()
23 |
24 | #define EASTL_ATOMIC_SIGNAL_FENCE_RELEASE() \
25 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_SIGNAL_FENCE_RELEASE)()
26 |
27 | #define EASTL_ATOMIC_SIGNAL_FENCE_ACQ_REL() \
28 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_SIGNAL_FENCE_ACQ_REL)()
29 |
30 | #define EASTL_ATOMIC_SIGNAL_FENCE_SEQ_CST() \
31 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_SIGNAL_FENCE_SEQ_CST)()
32 |
33 |
34 | #endif /* EASTL_ATOMIC_INTERNAL_MACROS_SIGNAL_FENCE_H */
35 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/atomic_macros/atomic_macros_store.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_MACROS_STORE_H
7 | #define EASTL_ATOMIC_INTERNAL_MACROS_STORE_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_ATOMIC_STORE_*_N(type, type * ptr, type val)
17 | //
18 | #define EASTL_ATOMIC_STORE_RELAXED_8(type, ptr, val) \
19 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_STORE_RELAXED_8)(type, ptr, val)
20 |
21 | #define EASTL_ATOMIC_STORE_RELEASE_8(type, ptr, val) \
22 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_STORE_RELEASE_8)(type, ptr, val)
23 |
24 | #define EASTL_ATOMIC_STORE_SEQ_CST_8(type, ptr, val) \
25 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_STORE_SEQ_CST_8)(type, ptr, val)
26 |
27 |
28 | #define EASTL_ATOMIC_STORE_RELAXED_16(type, ptr, val) \
29 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_STORE_RELAXED_16)(type, ptr, val)
30 |
31 | #define EASTL_ATOMIC_STORE_RELEASE_16(type, ptr, val) \
32 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_STORE_RELEASE_16)(type, ptr, val)
33 |
34 | #define EASTL_ATOMIC_STORE_SEQ_CST_16(type, ptr, val) \
35 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_STORE_SEQ_CST_16)(type, ptr, val)
36 |
37 |
38 | #define EASTL_ATOMIC_STORE_RELAXED_32(type, ptr, val) \
39 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_STORE_RELAXED_32)(type, ptr, val)
40 |
41 | #define EASTL_ATOMIC_STORE_RELEASE_32(type, ptr, val) \
42 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_STORE_RELEASE_32)(type, ptr, val)
43 |
44 | #define EASTL_ATOMIC_STORE_SEQ_CST_32(type, ptr, val) \
45 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_STORE_SEQ_CST_32)(type, ptr, val)
46 |
47 |
48 | #define EASTL_ATOMIC_STORE_RELAXED_64(type, ptr, val) \
49 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_STORE_RELAXED_64)(type, ptr, val)
50 |
51 | #define EASTL_ATOMIC_STORE_RELEASE_64(type, ptr, val) \
52 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_STORE_RELEASE_64)(type, ptr, val)
53 |
54 | #define EASTL_ATOMIC_STORE_SEQ_CST_64(type, ptr, val) \
55 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_STORE_SEQ_CST_64)(type, ptr, val)
56 |
57 |
58 | #define EASTL_ATOMIC_STORE_RELAXED_128(type, ptr, val) \
59 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_STORE_RELAXED_128)(type, ptr, val)
60 |
61 | #define EASTL_ATOMIC_STORE_RELEASE_128(type, ptr, val) \
62 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_STORE_RELEASE_128)(type, ptr, val)
63 |
64 | #define EASTL_ATOMIC_STORE_SEQ_CST_128(type, ptr, val) \
65 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_STORE_SEQ_CST_128)(type, ptr, val)
66 |
67 |
68 | #endif /* EASTL_ATOMIC_INTERNAL_MACROS_STORE_H */
69 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/atomic_macros/atomic_macros_thread_fence.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_MACROS_THREAD_FENCE_H
7 | #define EASTL_ATOMIC_INTERNAL_MACROS_THREAD_FENCE_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_ATOMIC_THREAD_FENCE_*()
17 | //
18 | #define EASTL_ATOMIC_THREAD_FENCE_RELAXED() \
19 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_THREAD_FENCE_RELAXED)()
20 |
21 | #define EASTL_ATOMIC_THREAD_FENCE_ACQUIRE() \
22 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_THREAD_FENCE_ACQUIRE)()
23 |
24 | #define EASTL_ATOMIC_THREAD_FENCE_RELEASE() \
25 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_THREAD_FENCE_RELEASE)()
26 |
27 | #define EASTL_ATOMIC_THREAD_FENCE_ACQ_REL() \
28 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_THREAD_FENCE_ACQ_REL)()
29 |
30 | #define EASTL_ATOMIC_THREAD_FENCE_SEQ_CST() \
31 | EASTL_ATOMIC_CHOOSE_OP_IMPL(ATOMIC_THREAD_FENCE_SEQ_CST)()
32 |
33 |
34 | #endif /* EASTL_ATOMIC_INTERNAL_MACROS_THREAD_FENCE_H */
35 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/atomic_memory_order.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_MEMORY_ORDER_H
7 | #define EASTL_ATOMIC_INTERNAL_MEMORY_ORDER_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | namespace eastl
15 | {
16 |
17 |
18 | namespace internal
19 | {
20 |
21 |
22 | struct memory_order_relaxed_s {};
23 | struct memory_order_read_depends_s {};
24 | struct memory_order_acquire_s {};
25 | struct memory_order_release_s {};
26 | struct memory_order_acq_rel_s {};
27 | struct memory_order_seq_cst_s {};
28 |
29 |
30 | } // namespace internal
31 |
32 |
33 | EASTL_CPP17_INLINE_VARIABLE EA_CONSTEXPR auto memory_order_relaxed = internal::memory_order_relaxed_s{};
34 | EASTL_CPP17_INLINE_VARIABLE EA_CONSTEXPR auto memory_order_read_depends = internal::memory_order_read_depends_s{};
35 | EASTL_CPP17_INLINE_VARIABLE EA_CONSTEXPR auto memory_order_acquire = internal::memory_order_acquire_s{};
36 | EASTL_CPP17_INLINE_VARIABLE EA_CONSTEXPR auto memory_order_release = internal::memory_order_release_s{};
37 | EASTL_CPP17_INLINE_VARIABLE EA_CONSTEXPR auto memory_order_acq_rel = internal::memory_order_acq_rel_s{};
38 | EASTL_CPP17_INLINE_VARIABLE EA_CONSTEXPR auto memory_order_seq_cst = internal::memory_order_seq_cst_s{};
39 |
40 |
41 | } // namespace eastl
42 |
43 |
44 | #endif /* EASTL_ATOMIC_INTERNAL_MEMORY_ORDER_H */
45 |
--------------------------------------------------------------------------------
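Note that these memory orders are distinct tag types rather than values of a runtime enum, so the chosen order is resolved at compile time through overload selection. A brief usage sketch (assumed; the counter name is illustrative):

#include <EASTL/atomic.h>

eastl::atomic<int> gCounter{0};

int Example()
{
	gCounter.fetch_add(1, eastl::memory_order_relaxed); // relaxed read-modify-write
	gCounter.store(42, eastl::memory_order_release);    // release store
	return gCounter.load(eastl::memory_order_acquire);  // acquire load
}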
/include/EASTL/internal/atomic/compiler/compiler.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_COMPILER_H
7 | #define EASTL_ATOMIC_INTERNAL_COMPILER_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // Include the compiler specific implementations
17 | //
18 | #if defined(EA_COMPILER_GNUC) || defined(__clang__)
19 |
20 | #include "gcc/compiler_gcc.h"
21 |
22 | #elif defined(EA_COMPILER_MSVC)
23 |
24 | #include "msvc/compiler_msvc.h"
25 |
26 | #endif
27 |
28 |
29 | /////////////////////////////////////////////////////////////////////////////////
30 |
31 |
32 | namespace eastl
33 | {
34 |
35 |
36 | namespace internal
37 | {
38 |
39 |
40 | /**
41 | * NOTE:
42 | *
43 | * This can be used by specific compiler implementations to implement a data dependency compiler barrier.
44 | * Some compiler barriers do not take in input dependencies as is possible with the gcc asm syntax.
45 | * Thus we need a way to create a false dependency on the input variable so the compiler does not dead-store
46 | * remove it.
47 | * A volatile function pointer ensures the compiler must always load the function pointer and call through it
48 | * since the compiler cannot reason about any side effects. Thus the compiler must always assume the
49 | * input variable may be accessed and thus cannot be dead-stored. This technique works even in the presence
50 | * of Link-Time Optimization. A compiler barrier with a data dependency is useful in these situations.
51 | *
52 | * void foo()
53 | * {
54 | * eastl::vector<int> v;
55 | * while (Benchmark.ContinueRunning())
56 | * {
57 | * v.push_back(0);
58 | * eastl::compiler_barrier(); OR eastl::compiler_barrier_data_dependency(v);
59 | * }
60 | * }
61 | *
62 | * We are trying to benchmark the push_back function of a vector. The vector v has only local scope.
63 | * The compiler is well within its rights to remove all accesses to v even with the compiler barrier
64 | * because there are no observable uses of the vector v.
65 | * The compiler barrier data dependency ensures there is an input dependency on the variable so that
66 | * it isn't removed. This is also useful when writing test code that the compiler may remove.
67 | */
68 |
69 | typedef void (*CompilerBarrierDataDependencyFuncPtr)(void*);
70 |
71 | extern EASTL_API volatile CompilerBarrierDataDependencyFuncPtr gCompilerBarrierDataDependencyFunc;
72 |
73 |
74 | #define EASTL_COMPILER_ATOMIC_COMPILER_BARRIER_DATA_DEPENDENCY_FUNC(ptr) \
75 | eastl::internal::gCompilerBarrierDataDependencyFunc(ptr)
76 |
77 |
78 | } // namespace internal
79 |
80 |
81 | } // namespace eastl
82 |
83 |
84 | /////////////////////////////////////////////////////////////////////////////////
85 |
86 |
87 | #include "compiler_fetch_add.h"
88 | #include "compiler_fetch_sub.h"
89 |
90 | #include "compiler_fetch_and.h"
91 | #include "compiler_fetch_xor.h"
92 | #include "compiler_fetch_or.h"
93 |
94 | #include "compiler_add_fetch.h"
95 | #include "compiler_sub_fetch.h"
96 |
97 | #include "compiler_and_fetch.h"
98 | #include "compiler_xor_fetch.h"
99 | #include "compiler_or_fetch.h"
100 |
101 | #include "compiler_exchange.h"
102 |
103 | #include "compiler_cmpxchg_weak.h"
104 | #include "compiler_cmpxchg_strong.h"
105 |
106 | #include "compiler_load.h"
107 | #include "compiler_store.h"
108 |
109 | #include "compiler_barrier.h"
110 |
111 | #include "compiler_cpu_pause.h"
112 |
113 | #include "compiler_memory_barrier.h"
114 |
115 | #include "compiler_signal_fence.h"
116 |
117 | #include "compiler_thread_fence.h"
118 |
119 |
120 | #endif /* EASTL_ATOMIC_INTERNAL_COMPILER_H */
121 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/compiler/compiler_barrier.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_COMPILER_BARRIER_H
7 | #define EASTL_ATOMIC_INTERNAL_COMPILER_BARRIER_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_COMPILER_ATOMIC_COMPILER_BARRIER()
17 | //
18 | #if defined(EASTL_COMPILER_ATOMIC_COMPILER_BARRIER)
19 | #define EASTL_COMPILER_ATOMIC_COMPILER_BARRIER_AVAILABLE 1
20 | #else
21 | #define EASTL_COMPILER_ATOMIC_COMPILER_BARRIER_AVAILABLE 0
22 | #endif
23 |
24 |
25 | /////////////////////////////////////////////////////////////////////////////////
26 | //
27 | // void EASTL_COMPILER_ATOMIC_COMPILER_BARRIER_DATA_DEPENDENCY(const T&, type)
28 | //
29 | #if defined(EASTL_COMPILER_ATOMIC_COMPILER_BARRIER_DATA_DEPENDENCY)
30 | #define EASTL_COMPILER_ATOMIC_COMPILER_BARRIER_DATA_DEPENDENCY_AVAILABLE 1
31 | #else
32 | #define EASTL_COMPILER_ATOMIC_COMPILER_BARRIER_DATA_DEPENDENCY_AVAILABLE 0
33 | #endif
34 |
35 |
36 | #endif /* EASTL_ATOMIC_INTERNAL_COMPILER_BARRIER_H */
37 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/compiler/compiler_cpu_pause.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_COMPILER_CPU_PAUSE_H
7 | #define EASTL_ATOMIC_INTERNAL_COMPILER_CPU_PAUSE_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_COMPILER_ATOMIC_CPU_PAUSE()
17 | //
18 | #if defined(EASTL_COMPILER_ATOMIC_CPU_PAUSE)
19 |
20 | #define EASTL_COMPILER_ATOMIC_CPU_PAUSE_AVAILABLE 1
21 |
22 | #else
23 |
24 | #define EASTL_COMPILER_ATOMIC_CPU_PAUSE() \
25 | ((void)0)
26 |
27 | #define EASTL_COMPILER_ATOMIC_CPU_PAUSE_AVAILABLE 1
28 |
29 | #endif
30 |
31 |
32 | #endif /* EASTL_ATOMIC_INTERNAL_COMPILER_CPU_PAUSE_H */
33 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/compiler/compiler_memory_barrier.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_COMPILER_MEMORY_BARRIER_H
7 | #define EASTL_ATOMIC_INTERNAL_COMPILER_MEMORY_BARRIER_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_COMPILER_ATOMIC_CPU_MB()
17 | //
18 | #if defined(EASTL_COMPILER_ATOMIC_CPU_MB)
19 | #define EASTL_COMPILER_ATOMIC_CPU_MB_AVAILABLE 1
20 | #else
21 | #define EASTL_COMPILER_ATOMIC_CPU_MB_AVAILABLE 0
22 | #endif
23 |
24 |
25 | /////////////////////////////////////////////////////////////////////////////////
26 | //
27 | // void EASTL_COMPILER_ATOMIC_CPU_WMB()
28 | //
29 | #if defined(EASTL_COMPILER_ATOMIC_CPU_WMB)
30 | #define EASTL_COMPILER_ATOMIC_CPU_WMB_AVAILABLE 1
31 | #else
32 | #define EASTL_COMPILER_ATOMIC_CPU_WMB_AVAILABLE 0
33 | #endif
34 |
35 |
36 | /////////////////////////////////////////////////////////////////////////////////
37 | //
38 | // void EASTL_COMPILER_ATOMIC_CPU_RMB()
39 | //
40 | #if defined(EASTL_COMPILER_ATOMIC_CPU_RMB)
41 | #define EASTL_COMPILER_ATOMIC_CPU_RMB_AVAILABLE 1
42 | #else
43 | #define EASTL_COMPILER_ATOMIC_CPU_RMB_AVAILABLE 0
44 | #endif
45 |
46 |
47 | #endif /* EASTL_ATOMIC_INTERNAL_COMPILER_MEMORY_BARRIER_H */
48 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/compiler/compiler_signal_fence.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_COMPILER_SIGNAL_FENCE_H
7 | #define EASTL_ATOMIC_INTERNAL_COMPILER_SIGNAL_FENCE_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_*()
17 | //
18 | #if defined(EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_RELAXED)
19 | #define EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_RELAXED_AVAILABLE 1
20 | #else
21 | #define EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_RELAXED_AVAILABLE 0
22 | #endif
23 |
24 | #if defined(EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_ACQUIRE)
25 | #define EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_ACQUIRE_AVAILABLE 1
26 | #else
27 | #define EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_ACQUIRE_AVAILABLE 0
28 | #endif
29 |
30 | #if defined(EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_RELEASE)
31 | #define EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_RELEASE_AVAILABLE 1
32 | #else
33 | #define EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_RELEASE_AVAILABLE 0
34 | #endif
35 |
36 | #if defined(EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_ACQ_REL)
37 | #define EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_ACQ_REL_AVAILABLE 1
38 | #else
39 | #define EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_ACQ_REL_AVAILABLE 0
40 | #endif
41 |
42 | #if defined(EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_SEQ_CST)
43 | #define EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_SEQ_CST_AVAILABLE 1
44 | #else
45 | #define EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_SEQ_CST_AVAILABLE 0
46 | #endif
47 |
48 |
49 | #endif /* EASTL_ATOMIC_INTERNAL_COMPILER_SIGNAL_FENCE_H */
50 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/compiler/compiler_store.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_COMPILER_STORE_H
7 | #define EASTL_ATOMIC_INTERNAL_COMPILER_STORE_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_COMPILER_ATOMIC_STORE_*_N(type, type * ptr, type val)
17 | //
18 | #if defined(EASTL_COMPILER_ATOMIC_STORE_RELAXED_8)
19 | #define EASTL_COMPILER_ATOMIC_STORE_RELAXED_8_AVAILABLE 1
20 | #else
21 | #define EASTL_COMPILER_ATOMIC_STORE_RELAXED_8_AVAILABLE 0
22 | #endif
23 |
24 | #if defined(EASTL_COMPILER_ATOMIC_STORE_RELEASE_8)
25 | #define EASTL_COMPILER_ATOMIC_STORE_RELEASE_8_AVAILABLE 1
26 | #else
27 | #define EASTL_COMPILER_ATOMIC_STORE_RELEASE_8_AVAILABLE 0
28 | #endif
29 |
30 | #if defined(EASTL_COMPILER_ATOMIC_STORE_SEQ_CST_8)
31 | #define EASTL_COMPILER_ATOMIC_STORE_SEQ_CST_8_AVAILABLE 1
32 | #else
33 | #define EASTL_COMPILER_ATOMIC_STORE_SEQ_CST_8_AVAILABLE 0
34 | #endif
35 |
36 |
37 | #if defined(EASTL_COMPILER_ATOMIC_STORE_RELAXED_16)
38 | #define EASTL_COMPILER_ATOMIC_STORE_RELAXED_16_AVAILABLE 1
39 | #else
40 | #define EASTL_COMPILER_ATOMIC_STORE_RELAXED_16_AVAILABLE 0
41 | #endif
42 |
43 | #if defined(EASTL_COMPILER_ATOMIC_STORE_RELEASE_16)
44 | #define EASTL_COMPILER_ATOMIC_STORE_RELEASE_16_AVAILABLE 1
45 | #else
46 | #define EASTL_COMPILER_ATOMIC_STORE_RELEASE_16_AVAILABLE 0
47 | #endif
48 |
49 | #if defined(EASTL_COMPILER_ATOMIC_STORE_SEQ_CST_16)
50 | #define EASTL_COMPILER_ATOMIC_STORE_SEQ_CST_16_AVAILABLE 1
51 | #else
52 | #define EASTL_COMPILER_ATOMIC_STORE_SEQ_CST_16_AVAILABLE 0
53 | #endif
54 |
55 |
56 | #if defined(EASTL_COMPILER_ATOMIC_STORE_RELAXED_32)
57 | #define EASTL_COMPILER_ATOMIC_STORE_RELAXED_32_AVAILABLE 1
58 | #else
59 | #define EASTL_COMPILER_ATOMIC_STORE_RELAXED_32_AVAILABLE 0
60 | #endif
61 |
62 | #if defined(EASTL_COMPILER_ATOMIC_STORE_RELEASE_32)
63 | #define EASTL_COMPILER_ATOMIC_STORE_RELEASE_32_AVAILABLE 1
64 | #else
65 | #define EASTL_COMPILER_ATOMIC_STORE_RELEASE_32_AVAILABLE 0
66 | #endif
67 |
68 | #if defined(EASTL_COMPILER_ATOMIC_STORE_SEQ_CST_32)
69 | #define EASTL_COMPILER_ATOMIC_STORE_SEQ_CST_32_AVAILABLE 1
70 | #else
71 | #define EASTL_COMPILER_ATOMIC_STORE_SEQ_CST_32_AVAILABLE 0
72 | #endif
73 |
74 |
75 | #if defined(EASTL_COMPILER_ATOMIC_STORE_RELAXED_64)
76 | #define EASTL_COMPILER_ATOMIC_STORE_RELAXED_64_AVAILABLE 1
77 | #else
78 | #define EASTL_COMPILER_ATOMIC_STORE_RELAXED_64_AVAILABLE 0
79 | #endif
80 |
81 | #if defined(EASTL_COMPILER_ATOMIC_STORE_RELEASE_64)
82 | #define EASTL_COMPILER_ATOMIC_STORE_RELEASE_64_AVAILABLE 1
83 | #else
84 | #define EASTL_COMPILER_ATOMIC_STORE_RELEASE_64_AVAILABLE 0
85 | #endif
86 |
87 | #if defined(EASTL_COMPILER_ATOMIC_STORE_SEQ_CST_64)
88 | #define EASTL_COMPILER_ATOMIC_STORE_SEQ_CST_64_AVAILABLE 1
89 | #else
90 | #define EASTL_COMPILER_ATOMIC_STORE_SEQ_CST_64_AVAILABLE 0
91 | #endif
92 |
93 |
94 | #if defined(EASTL_COMPILER_ATOMIC_STORE_RELAXED_128)
95 | #define EASTL_COMPILER_ATOMIC_STORE_RELAXED_128_AVAILABLE 1
96 | #else
97 | #define EASTL_COMPILER_ATOMIC_STORE_RELAXED_128_AVAILABLE 0
98 | #endif
99 |
100 | #if defined(EASTL_COMPILER_ATOMIC_STORE_RELEASE_128)
101 | #define EASTL_COMPILER_ATOMIC_STORE_RELEASE_128_AVAILABLE 1
102 | #else
103 | #define EASTL_COMPILER_ATOMIC_STORE_RELEASE_128_AVAILABLE 0
104 | #endif
105 |
106 | #if defined(EASTL_COMPILER_ATOMIC_STORE_SEQ_CST_128)
107 | #define EASTL_COMPILER_ATOMIC_STORE_SEQ_CST_128_AVAILABLE 1
108 | #else
109 | #define EASTL_COMPILER_ATOMIC_STORE_SEQ_CST_128_AVAILABLE 0
110 | #endif
111 |
112 |
113 | #endif /* EASTL_ATOMIC_INTERNAL_COMPILER_STORE_H */
114 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/compiler/compiler_thread_fence.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_COMPILER_THREAD_FENCE_H
7 | #define EASTL_ATOMIC_INTERNAL_COMPILER_THREAD_FENCE_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_COMPILER_ATOMIC_THREAD_FENCE_*()
17 | //
18 | #if defined(EASTL_COMPILER_ATOMIC_THREAD_FENCE_RELAXED)
19 | #define EASTL_COMPILER_ATOMIC_THREAD_FENCE_RELAXED_AVAILABLE 1
20 | #else
21 | #define EASTL_COMPILER_ATOMIC_THREAD_FENCE_RELAXED_AVAILABLE 0
22 | #endif
23 |
24 | #if defined(EASTL_COMPILER_ATOMIC_THREAD_FENCE_ACQUIRE)
25 | #define EASTL_COMPILER_ATOMIC_THREAD_FENCE_ACQUIRE_AVAILABLE 1
26 | #else
27 | #define EASTL_COMPILER_ATOMIC_THREAD_FENCE_ACQUIRE_AVAILABLE 0
28 | #endif
29 |
30 | #if defined(EASTL_COMPILER_ATOMIC_THREAD_FENCE_RELEASE)
31 | #define EASTL_COMPILER_ATOMIC_THREAD_FENCE_RELEASE_AVAILABLE 1
32 | #else
33 | #define EASTL_COMPILER_ATOMIC_THREAD_FENCE_RELEASE_AVAILABLE 0
34 | #endif
35 |
36 | #if defined(EASTL_COMPILER_ATOMIC_THREAD_FENCE_ACQ_REL)
37 | #define EASTL_COMPILER_ATOMIC_THREAD_FENCE_ACQ_REL_AVAILABLE 1
38 | #else
39 | #define EASTL_COMPILER_ATOMIC_THREAD_FENCE_ACQ_REL_AVAILABLE 0
40 | #endif
41 |
42 | #if defined(EASTL_COMPILER_ATOMIC_THREAD_FENCE_SEQ_CST)
43 | #define EASTL_COMPILER_ATOMIC_THREAD_FENCE_SEQ_CST_AVAILABLE 1
44 | #else
45 | #define EASTL_COMPILER_ATOMIC_THREAD_FENCE_SEQ_CST_AVAILABLE 0
46 | #endif
47 |
48 |
49 | #endif /* EASTL_ATOMIC_INTERNAL_COMPILER_THREAD_FENCE_H */
50 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/compiler/gcc/compiler_gcc_barrier.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_COMPILER_GCC_BARRIER_H
7 | #define EASTL_ATOMIC_INTERNAL_COMPILER_GCC_BARRIER_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_COMPILER_ATOMIC_COMPILER_BARRIER()
17 | //
18 | #define EASTL_COMPILER_ATOMIC_COMPILER_BARRIER() \
19 | __asm__ __volatile__ ("" ::: "memory")
20 |
21 |
22 | /////////////////////////////////////////////////////////////////////////////////
23 | //
24 | // void EASTL_COMPILER_ATOMIC_COMPILER_BARRIER_DATA_DEPENDENCY(const T&, type)
25 | //
26 | #define EASTL_COMPILER_ATOMIC_COMPILER_BARRIER_DATA_DEPENDENCY(val, type) \
27 | __asm__ __volatile__ ("" : /* Output Operands */ : "r"(&(val)) : "memory")
28 |
29 |
30 | #endif /* EASTL_ATOMIC_INTERNAL_COMPILER_GCC_BARRIER_H */
31 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/compiler/gcc/compiler_gcc_cpu_pause.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_COMPILER_GCC_CPU_PAUSE_H
7 | #define EASTL_ATOMIC_INTERNAL_COMPILER_GCC_CPU_PAUSE_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_COMPILER_ATOMIC_CPU_PAUSE()
17 | //
18 | #if defined(EA_PROCESSOR_X86) || defined(EA_PROCESSOR_X86_64)
19 |
20 | #define EASTL_COMPILER_ATOMIC_CPU_PAUSE() \
21 | __asm__ __volatile__ ("pause")
22 |
23 | #elif defined(EA_PROCESSOR_ARM32) || defined(EA_PROCESSOR_ARM64)
24 |
25 | #define EASTL_COMPILER_ATOMIC_CPU_PAUSE() \
26 | __asm__ __volatile__ ("yield")
27 |
28 | #endif
29 |
30 |
31 | #endif /* EASTL_ATOMIC_INTERNAL_COMPILER_GCC_CPU_PAUSE_H */
32 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/compiler/gcc/compiler_gcc_load.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_COMPILER_GCC_LOAD_H
7 | #define EASTL_ATOMIC_INTERNAL_COMPILER_GCC_LOAD_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | #define EASTL_GCC_ATOMIC_LOAD_N(integralType, type, ret, ptr, gccMemoryOrder) \
15 | { \
16 | integralType retIntegral; \
17 | __atomic_load(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr)), &retIntegral, gccMemoryOrder); \
18 | \
19 | ret = EASTL_ATOMIC_TYPE_PUN_CAST(type, retIntegral); \
20 | }
21 |
22 | #define EASTL_GCC_ATOMIC_LOAD_8(type, ret, ptr, gccMemoryOrder) \
23 | EASTL_GCC_ATOMIC_LOAD_N(uint8_t, type, ret, ptr, gccMemoryOrder)
24 |
25 | #define EASTL_GCC_ATOMIC_LOAD_16(type, ret, ptr, gccMemoryOrder) \
26 | EASTL_GCC_ATOMIC_LOAD_N(uint16_t, type, ret, ptr, gccMemoryOrder)
27 |
28 | #define EASTL_GCC_ATOMIC_LOAD_32(type, ret, ptr, gccMemoryOrder) \
29 | EASTL_GCC_ATOMIC_LOAD_N(uint32_t, type, ret, ptr, gccMemoryOrder)
30 |
31 | #define EASTL_GCC_ATOMIC_LOAD_64(type, ret, ptr, gccMemoryOrder) \
32 | EASTL_GCC_ATOMIC_LOAD_N(uint64_t, type, ret, ptr, gccMemoryOrder)
33 |
34 | #define EASTL_GCC_ATOMIC_LOAD_128(type, ret, ptr, gccMemoryOrder) \
35 | EASTL_GCC_ATOMIC_LOAD_N(__uint128_t, type, ret, ptr, gccMemoryOrder)
36 |
37 |
38 | /////////////////////////////////////////////////////////////////////////////////
39 | //
40 | // void EASTL_COMPILER_ATOMIC_LOAD_*_N(type, type ret, type * ptr)
41 | //
42 | #define EASTL_COMPILER_ATOMIC_LOAD_RELAXED_8(type, ret, ptr) \
43 | EASTL_GCC_ATOMIC_LOAD_8(type, ret, ptr, __ATOMIC_RELAXED)
44 |
45 | #define EASTL_COMPILER_ATOMIC_LOAD_RELAXED_16(type, ret, ptr) \
46 | EASTL_GCC_ATOMIC_LOAD_16(type, ret, ptr, __ATOMIC_RELAXED)
47 |
48 | #define EASTL_COMPILER_ATOMIC_LOAD_RELAXED_32(type, ret, ptr) \
49 | EASTL_GCC_ATOMIC_LOAD_32(type, ret, ptr, __ATOMIC_RELAXED)
50 |
51 | #define EASTL_COMPILER_ATOMIC_LOAD_RELAXED_64(type, ret, ptr) \
52 | EASTL_GCC_ATOMIC_LOAD_64(type, ret, ptr, __ATOMIC_RELAXED)
53 |
54 | #define EASTL_COMPILER_ATOMIC_LOAD_RELAXED_128(type, ret, ptr) \
55 | EASTL_GCC_ATOMIC_LOAD_128(type, ret, ptr, __ATOMIC_RELAXED)
56 |
57 |
58 | #define EASTL_COMPILER_ATOMIC_LOAD_ACQUIRE_8(type, ret, ptr) \
59 | EASTL_GCC_ATOMIC_LOAD_8(type, ret, ptr, __ATOMIC_ACQUIRE)
60 |
61 | #define EASTL_COMPILER_ATOMIC_LOAD_ACQUIRE_16(type, ret, ptr) \
62 | EASTL_GCC_ATOMIC_LOAD_16(type, ret, ptr, __ATOMIC_ACQUIRE)
63 |
64 | #define EASTL_COMPILER_ATOMIC_LOAD_ACQUIRE_32(type, ret, ptr) \
65 | EASTL_GCC_ATOMIC_LOAD_32(type, ret, ptr, __ATOMIC_ACQUIRE)
66 |
67 | #define EASTL_COMPILER_ATOMIC_LOAD_ACQUIRE_64(type, ret, ptr) \
68 | EASTL_GCC_ATOMIC_LOAD_64(type, ret, ptr, __ATOMIC_ACQUIRE)
69 |
70 | #define EASTL_COMPILER_ATOMIC_LOAD_ACQUIRE_128(type, ret, ptr) \
71 | EASTL_GCC_ATOMIC_LOAD_128(type, ret, ptr, __ATOMIC_ACQUIRE)
72 |
73 |
74 | #define EASTL_COMPILER_ATOMIC_LOAD_SEQ_CST_8(type, ret, ptr) \
75 | EASTL_GCC_ATOMIC_LOAD_8(type, ret, ptr, __ATOMIC_SEQ_CST)
76 |
77 | #define EASTL_COMPILER_ATOMIC_LOAD_SEQ_CST_16(type, ret, ptr) \
78 | EASTL_GCC_ATOMIC_LOAD_16(type, ret, ptr, __ATOMIC_SEQ_CST)
79 |
80 | #define EASTL_COMPILER_ATOMIC_LOAD_SEQ_CST_32(type, ret, ptr) \
81 | EASTL_GCC_ATOMIC_LOAD_32(type, ret, ptr, __ATOMIC_SEQ_CST)
82 |
83 | #define EASTL_COMPILER_ATOMIC_LOAD_SEQ_CST_64(type, ret, ptr) \
84 | EASTL_GCC_ATOMIC_LOAD_64(type, ret, ptr, __ATOMIC_SEQ_CST)
85 |
86 | #define EASTL_COMPILER_ATOMIC_LOAD_SEQ_CST_128(type, ret, ptr) \
87 | EASTL_GCC_ATOMIC_LOAD_128(type, ret, ptr, __ATOMIC_SEQ_CST)
88 |
89 |
90 | #endif /* EASTL_ATOMIC_INTERNAL_COMPILER_GCC_LOAD_H */
91 |
--------------------------------------------------------------------------------
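Read together with the atomic_macros layer, a 32-bit acquire load therefore funnels into __atomic_load on an integral pun of the object. Roughly, and assuming MyType is a 4-byte trivially copyable user type (MyType, ret and ptr are illustrative names):

// EASTL_COMPILER_ATOMIC_LOAD_ACQUIRE_32(MyType, ret, ptr) expands to approximately:
{
	uint32_t retIntegral;
	__atomic_load(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(uint32_t, (ptr)), &retIntegral, __ATOMIC_ACQUIRE);

	ret = EASTL_ATOMIC_TYPE_PUN_CAST(MyType, retIntegral);
}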
/include/EASTL/internal/atomic/compiler/gcc/compiler_gcc_signal_fence.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_COMPILER_GCC_SIGNAL_FENCE_H
7 | #define EASTL_ATOMIC_INTERNAL_COMPILER_GCC_SIGNAL_FENCE_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | #define EASTL_GCC_ATOMIC_SIGNAL_FENCE(gccMemoryOrder) \
15 | __atomic_signal_fence(gccMemoryOrder)
16 |
17 |
18 | /////////////////////////////////////////////////////////////////////////////////
19 | //
20 | // void EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_*()
21 | //
22 | #define EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_RELAXED() \
23 | EASTL_GCC_ATOMIC_SIGNAL_FENCE(__ATOMIC_RELAXED)
24 |
25 | #define EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_ACQUIRE() \
26 | EASTL_GCC_ATOMIC_SIGNAL_FENCE(__ATOMIC_ACQUIRE)
27 |
28 | #define EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_RELEASE() \
29 | EASTL_GCC_ATOMIC_SIGNAL_FENCE(__ATOMIC_RELEASE)
30 |
31 | #define EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_ACQ_REL() \
32 | EASTL_GCC_ATOMIC_SIGNAL_FENCE(__ATOMIC_ACQ_REL)
33 |
34 | #define EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_SEQ_CST() \
35 | EASTL_GCC_ATOMIC_SIGNAL_FENCE(__ATOMIC_SEQ_CST)
36 |
37 |
38 | #endif /* EASTL_ATOMIC_INTERNAL_COMPILER_GCC_SIGNAL_FENCE_H */
39 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/compiler/gcc/compiler_gcc_store.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_COMPILER_GCC_STORE_H
7 | #define EASTL_ATOMIC_INTERNAL_COMPILER_GCC_STORE_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | #define EASTL_GCC_ATOMIC_STORE_N(integralType, ptr, val, gccMemoryOrder) \
15 | { \
16 | integralType valIntegral = EASTL_ATOMIC_TYPE_PUN_CAST(integralType, (val)); \
17 | __atomic_store(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr)), &valIntegral, gccMemoryOrder); \
18 | }
19 |
20 |
21 | #define EASTL_GCC_ATOMIC_STORE_8(ptr, val, gccMemoryOrder) \
22 | EASTL_GCC_ATOMIC_STORE_N(uint8_t, ptr, val, gccMemoryOrder)
23 |
24 | #define EASTL_GCC_ATOMIC_STORE_16(ptr, val, gccMemoryOrder) \
25 | EASTL_GCC_ATOMIC_STORE_N(uint16_t, ptr, val, gccMemoryOrder)
26 |
27 | #define EASTL_GCC_ATOMIC_STORE_32(ptr, val, gccMemoryOrder) \
28 | EASTL_GCC_ATOMIC_STORE_N(uint32_t, ptr, val, gccMemoryOrder)
29 |
30 | #define EASTL_GCC_ATOMIC_STORE_64(ptr, val, gccMemoryOrder) \
31 | EASTL_GCC_ATOMIC_STORE_N(uint64_t, ptr, val, gccMemoryOrder)
32 |
33 | #define EASTL_GCC_ATOMIC_STORE_128(ptr, val, gccMemoryOrder) \
34 | EASTL_GCC_ATOMIC_STORE_N(__uint128_t, ptr, val, gccMemoryOrder)
35 |
36 |
37 | /////////////////////////////////////////////////////////////////////////////////
38 | //
39 | // void EASTL_COMPILER_ATOMIC_STORE_*_N(type, type * ptr, type val)
40 | //
41 | #define EASTL_COMPILER_ATOMIC_STORE_RELAXED_8(type, ptr, val) \
42 | EASTL_GCC_ATOMIC_STORE_8(ptr, val, __ATOMIC_RELAXED)
43 |
44 | #define EASTL_COMPILER_ATOMIC_STORE_RELAXED_16(type, ptr, val) \
45 | EASTL_GCC_ATOMIC_STORE_16(ptr, val, __ATOMIC_RELAXED)
46 |
47 | #define EASTL_COMPILER_ATOMIC_STORE_RELAXED_32(type, ptr, val) \
48 | EASTL_GCC_ATOMIC_STORE_32(ptr, val, __ATOMIC_RELAXED)
49 |
50 | #define EASTL_COMPILER_ATOMIC_STORE_RELAXED_64(type, ptr, val) \
51 | EASTL_GCC_ATOMIC_STORE_64(ptr, val, __ATOMIC_RELAXED)
52 |
53 | #define EASTL_COMPILER_ATOMIC_STORE_RELAXED_128(type, ptr, val) \
54 | EASTL_GCC_ATOMIC_STORE_128(ptr, val, __ATOMIC_RELAXED)
55 |
56 |
57 | #define EASTL_COMPILER_ATOMIC_STORE_RELEASE_8(type, ptr, val) \
58 | EASTL_GCC_ATOMIC_STORE_8(ptr, val, __ATOMIC_RELEASE)
59 |
60 | #define EASTL_COMPILER_ATOMIC_STORE_RELEASE_16(type, ptr, val) \
61 | EASTL_GCC_ATOMIC_STORE_16(ptr, val, __ATOMIC_RELEASE)
62 |
63 | #define EASTL_COMPILER_ATOMIC_STORE_RELEASE_32(type, ptr, val) \
64 | EASTL_GCC_ATOMIC_STORE_32(ptr, val, __ATOMIC_RELEASE)
65 |
66 | #define EASTL_COMPILER_ATOMIC_STORE_RELEASE_64(type, ptr, val) \
67 | EASTL_GCC_ATOMIC_STORE_64(ptr, val, __ATOMIC_RELEASE)
68 |
69 | #define EASTL_COMPILER_ATOMIC_STORE_RELEASE_128(type, ptr, val) \
70 | EASTL_GCC_ATOMIC_STORE_128(ptr, val, __ATOMIC_RELEASE)
71 |
72 |
73 | #define EASTL_COMPILER_ATOMIC_STORE_SEQ_CST_8(type, ptr, val) \
74 | EASTL_GCC_ATOMIC_STORE_8(ptr, val, __ATOMIC_SEQ_CST)
75 |
76 | #define EASTL_COMPILER_ATOMIC_STORE_SEQ_CST_16(type, ptr, val) \
77 | EASTL_GCC_ATOMIC_STORE_16(ptr, val, __ATOMIC_SEQ_CST)
78 |
79 | #define EASTL_COMPILER_ATOMIC_STORE_SEQ_CST_32(type, ptr, val) \
80 | EASTL_GCC_ATOMIC_STORE_32(ptr, val, __ATOMIC_SEQ_CST)
81 |
82 | #define EASTL_COMPILER_ATOMIC_STORE_SEQ_CST_64(type, ptr, val) \
83 | EASTL_GCC_ATOMIC_STORE_64(ptr, val, __ATOMIC_SEQ_CST)
84 |
85 | #define EASTL_COMPILER_ATOMIC_STORE_SEQ_CST_128(type, ptr, val) \
86 | EASTL_GCC_ATOMIC_STORE_128(ptr, val, __ATOMIC_SEQ_CST)
87 |
88 |
89 | #endif /* EASTL_ATOMIC_INTERNAL_COMPILER_GCC_STORE_H */
90 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/compiler/gcc/compiler_gcc_thread_fence.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_COMPILER_GCC_THREAD_FENCE_H
7 | #define EASTL_ATOMIC_INTERNAL_COMPILER_GCC_THREAD_FENCE_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | #define EASTL_GCC_ATOMIC_THREAD_FENCE(gccMemoryOrder) \
15 | __atomic_thread_fence(gccMemoryOrder)
16 |
17 |
18 | /////////////////////////////////////////////////////////////////////////////////
19 | //
20 | // void EASTL_COMPILER_ATOMIC_THREAD_FENCE_*()
21 | //
22 | #define EASTL_COMPILER_ATOMIC_THREAD_FENCE_RELAXED() \
23 | EASTL_GCC_ATOMIC_THREAD_FENCE(__ATOMIC_RELAXED)
24 |
25 | #define EASTL_COMPILER_ATOMIC_THREAD_FENCE_ACQUIRE() \
26 | EASTL_GCC_ATOMIC_THREAD_FENCE(__ATOMIC_ACQUIRE)
27 |
28 | #define EASTL_COMPILER_ATOMIC_THREAD_FENCE_RELEASE() \
29 | EASTL_GCC_ATOMIC_THREAD_FENCE(__ATOMIC_RELEASE)
30 |
31 | #define EASTL_COMPILER_ATOMIC_THREAD_FENCE_ACQ_REL() \
32 | EASTL_GCC_ATOMIC_THREAD_FENCE(__ATOMIC_ACQ_REL)
33 |
34 | #define EASTL_COMPILER_ATOMIC_THREAD_FENCE_SEQ_CST() \
35 | EASTL_GCC_ATOMIC_THREAD_FENCE(__ATOMIC_SEQ_CST)
36 |
37 |
38 | #endif /* EASTL_ATOMIC_INTERNAL_COMPILER_GCC_THREAD_FENCE_H */
39 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/compiler/msvc/compiler_msvc_barrier.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_COMPILER_MSVC_BARRIER_H
7 | #define EASTL_ATOMIC_INTERNAL_COMPILER_MSVC_BARRIER_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_COMPILER_ATOMIC_COMPILER_BARRIER()
17 | //
18 | #define EASTL_COMPILER_ATOMIC_COMPILER_BARRIER() \
19 | EA_DISABLE_CLANG_WARNING(-Wdeprecated-declarations) \
20 | _ReadWriteBarrier() \
21 | EA_RESTORE_CLANG_WARNING()
22 |
23 |
24 | /////////////////////////////////////////////////////////////////////////////////
25 | //
26 | // void EASTL_COMPILER_ATOMIC_COMPILER_BARRIER_DATA_DEPENDENCY(const T&, type)
27 | //
28 | #define EASTL_COMPILER_ATOMIC_COMPILER_BARRIER_DATA_DEPENDENCY(val, type) \
29 | EASTL_COMPILER_ATOMIC_COMPILER_BARRIER_DATA_DEPENDENCY_FUNC(const_cast<type*>(eastl::addressof((val)))); \
30 | EASTL_ATOMIC_COMPILER_BARRIER()
31 |
32 |
33 | #endif /* EASTL_ATOMIC_INTERNAL_COMPILER_MSVC_BARRIER_H */
34 |
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/compiler/msvc/compiler_msvc_cpu_pause.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_COMPILER_MSVC_CPU_PAUSE_H
7 | #define EASTL_ATOMIC_INTERNAL_COMPILER_MSVC_CPU_PAUSE_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 | #if defined(EA_PROCESSOR_X86) || defined(EA_PROCESSOR_X86_64)
14 | #define EASTL_COMPILER_ATOMIC_CPU_PAUSE() _mm_pause()
15 | #elif defined(EA_PROCESSOR_ARM32) || defined(EA_PROCESSOR_ARM64)
16 | #define EASTL_COMPILER_ATOMIC_CPU_PAUSE() __yield()
17 | #else
18 | #error Unsupported CPU architecture for EASTL_COMPILER_ATOMIC_CPU_PAUSE
19 | #endif
20 |
21 |
22 | #endif /* EASTL_ATOMIC_INTERNAL_COMPILER_MSVC_CPU_PAUSE_H */
23 |
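EASTL_COMPILER_ATOMIC_CPU_PAUSE() maps to the architecture's spin-wait hint (_mm_pause on x86/x64, __yield on ARM). A spin-wait sketch using the macro directly; the flag type and loop are illustrative only, as real EASTL code reaches this through its atomic spin-wait helpers:

    // Busy-wait politely: the pause/yield hint reduces power use and pipeline
    // flushes while another thread is expected to set the flag shortly.
    void SpinUntilNonZero(volatile long& flag)
    {
        while (flag == 0)
        {
            EASTL_COMPILER_ATOMIC_CPU_PAUSE(); // _mm_pause() on x86/x64, __yield() on ARM
        }
    }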
--------------------------------------------------------------------------------
/include/EASTL/internal/atomic/compiler/msvc/compiler_msvc_signal_fence.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_ATOMIC_INTERNAL_COMPILER_MSVC_SIGNAL_FENCE_H
7 | #define EASTL_ATOMIC_INTERNAL_COMPILER_MSVC_SIGNAL_FENCE_H
8 |
9 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10 | #pragma once
11 | #endif
12 |
13 |
14 | /////////////////////////////////////////////////////////////////////////////////
15 | //
16 | // void EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_*()
17 | //
18 | #define EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_RELAXED() \
19 | EASTL_ATOMIC_COMPILER_BARRIER()
20 |
21 | #define EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_ACQUIRE() \
22 | EASTL_ATOMIC_COMPILER_BARRIER()
23 |
24 | #define EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_RELEASE() \
25 | EASTL_ATOMIC_COMPILER_BARRIER()
26 |
27 | #define EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_ACQ_REL() \
28 | EASTL_ATOMIC_COMPILER_BARRIER()
29 |
30 | #define EASTL_COMPILER_ATOMIC_SIGNAL_FENCE_SEQ_CST() \
31 | EASTL_ATOMIC_COMPILER_BARRIER()
32 |
33 |
34 | #endif /* EASTL_ATOMIC_INTERNAL_COMPILER_MSVC_SIGNAL_FENCE_H */
35 |
--------------------------------------------------------------------------------
/include/EASTL/internal/concepts.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_INTERNAL_CONCEPTS_H
7 | #define EASTL_INTERNAL_CONCEPTS_H
8 |
9 |
10 | #include <EASTL/internal/config.h>
11 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
12 | #pragma once
13 | #endif
14 |
15 | #include <EASTL/type_traits.h>
16 |
17 | namespace eastl
18 | {
19 | namespace internal
20 | {
21 | template <typename From, typename To>
22 | using detect_explicitely_convertible = decltype(static_cast<To>(declval<From>()));
23 |
24 | namespace concepts
25 | {
26 | template <typename T>
27 | constexpr bool destructible = is_nothrow_destructible_v<T>;
28 |
29 | template <typename T, typename... Args>
30 | constexpr bool constructible_from = destructible<T> && is_constructible_v<T, Args...>;
31 |
32 | template <typename From, typename To>
33 | constexpr bool convertible_to =
34 | is_convertible_v<From, To> && is_detected_v<detect_explicitely_convertible, From, To>;
35 |
36 | template <typename T>
37 | constexpr bool move_constructible = constructible_from<T, T> && convertible_to<T, T>;
38 |
39 | template <typename T>
40 | constexpr bool copy_constructible =
41 | move_constructible<T> && constructible_from<T, T&> && convertible_to<T&, T> &&
42 | constructible_from<T, const T&> && convertible_to<const T&, T> && constructible_from<T, const T> &&
43 | convertible_to<const T, T>;
44 | } // namespace concepts
45 | } // namespace internal
46 | } // namespace eastl
47 |
48 | #endif
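These variable templates approximate the C++20 concepts of the same names as plain constexpr bools, so internal EASTL code can check them in static_assert or SFINAE without requires-clauses. A small sketch of how they can be checked (Widget is a hypothetical type):

    #include <EASTL/internal/concepts.h>

    struct Widget
    {
        Widget(int) {}
        Widget(const Widget&) = default;
    };

    static_assert(eastl::internal::concepts::constructible_from<Widget, int>, "constructible from int");
    static_assert(eastl::internal::concepts::copy_constructible<Widget>, "copy constructible");
    static_assert(eastl::internal::concepts::convertible_to<int, long>, "int converts to long");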
--------------------------------------------------------------------------------
/include/EASTL/internal/enable_shared.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_INTERNAL_ENABLE_SHARED_H
7 | #define EASTL_INTERNAL_ENABLE_SHARED_H
8 |
9 |
10 | #include <EASTL/internal/config.h>
11 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
12 | #pragma once
13 | #endif
14 |
15 | namespace eastl
16 | {
17 |
18 | /// enable_shared_from_this
19 | ///
20 | /// This is a helper mixin class that allows you to make any class
21 | /// export a shared_ptr instance that is associated with the class
22 | /// instance. Any class that inherits from this class gets two functions:
23 | ///     shared_ptr<T> shared_from_this();
24 | ///     shared_ptr<const T> shared_from_this() const;
25 | /// If you call shared_from_this, you get back a shared_ptr that
26 | /// refers to the class. A second call to shared_from_this returns
27 | /// another shared_ptr that is shared with the first one.
28 | ///
29 | /// The trick that happens which is not so obvious here (and which is
30 | /// not mentioned at all in the Boost documentation of their version
31 | /// of this) is that the shared_ptr constructor detects that the
32 | /// class has an enable_shared_from_this mixin and sets up this system
33 | /// automatically for the user. This is done with template tricks.
34 | ///
35 | /// For some additional explanation, see the Boost documentation for
36 | /// their description of their version of enable_shared_from_this.
37 | ///
38 | template <typename T>
39 | class enable_shared_from_this
40 | {
41 | public:
42 | shared_ptr<T> shared_from_this()
43 | { return shared_ptr<T>(mWeakPtr); }
44 |
45 | shared_ptr<const T> shared_from_this() const
46 | { return shared_ptr<const T>(mWeakPtr); }
47 |
48 | weak_ptr<T> weak_from_this()
49 | { return mWeakPtr; }
50 |
51 | weak_ptr<const T> weak_from_this() const
52 | { return mWeakPtr; }
53 |
54 | public: // This is public because the alternative fails on some compilers that we need to support.
55 | mutable weak_ptr<T> mWeakPtr;
56 |
57 | protected:
58 | template <typename U> friend class shared_ptr;
59 |
60 | EA_CONSTEXPR enable_shared_from_this() EA_NOEXCEPT
61 | { }
62 |
63 | enable_shared_from_this(const enable_shared_from_this&) EA_NOEXCEPT
64 | { }
65 |
66 | enable_shared_from_this& operator=(const enable_shared_from_this&) EA_NOEXCEPT
67 | { return *this; }
68 |
69 | ~enable_shared_from_this()
70 | { }
71 |
72 | }; // enable_shared_from_this
73 |
74 | } // namespace eastl
75 |
76 |
77 | #endif // Header include guard
78 |
79 |
80 |
81 |
82 |
83 |
84 |
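A usage sketch, assuming the eastl::shared_ptr API (Widget and Self are hypothetical names): inheriting the mixin lets an object hand out shared_ptr instances that share ownership with whichever shared_ptr already owns it.

    #include <EASTL/shared_ptr.h>

    class Widget : public eastl::enable_shared_from_this<Widget>
    {
    public:
        eastl::shared_ptr<Widget> Self() { return shared_from_this(); }
    };

    void Example()
    {
        eastl::shared_ptr<Widget> p(new Widget); // shared_ptr's constructor detects the mixin and fills in mWeakPtr
        eastl::shared_ptr<Widget> q = p->Self(); // q shares p's control block; the use count is now 2
    }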
--------------------------------------------------------------------------------
/include/EASTL/internal/function_help.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////
4 |
5 | #ifndef EASTL_INTERNAL_FUNCTION_HELP_H
6 | #define EASTL_INTERNAL_FUNCTION_HELP_H
7 |
8 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
9 | #pragma once
10 | #endif
11 |
12 | #include <EASTL/internal/config.h>
13 | #include <EASTL/type_traits.h>
14 |
15 | namespace eastl
16 | {
17 | namespace internal
18 | {
19 |
20 | //////////////////////////////////////////////////////////////////////
21 | // is_null
22 | //
23 | template <typename T>
24 | bool is_null(const T&)
25 | {
26 | return false;
27 | }
28 |
29 | template <typename Result, typename... Arguments>
30 | bool is_null(Result (*const& function_pointer)(Arguments...))
31 | {
32 | return function_pointer == nullptr;
33 | }
34 |
35 | template <typename Result, typename Class, typename... Arguments>
36 | bool is_null(Result (Class::*const& function_pointer)(Arguments...))
37 | {
38 | return function_pointer == nullptr;
39 | }
40 |
41 | template <typename Result, typename Class, typename... Arguments>
42 | bool is_null(Result (Class::*const& function_pointer)(Arguments...) const)
43 | {
44 | return function_pointer == nullptr;
45 | }
46 |
47 | } // namespace internal
48 | } // namespace eastl
49 |
50 | #endif // Header include guard
51 |
52 |
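is_null is what eastl::function uses to decide whether a callable being assigned is "empty": only raw function pointers and pointers to members can compare equal to nullptr, while any other callable falls into the generic overload and always reports non-null. A small sketch of the three cases:

    #include <EASTL/internal/function_help.h>

    struct Foo { int Bar(int x) { return x; } };

    void Example()
    {
        int (*fp)(int)      = nullptr;
        int (Foo::*mp)(int) = &Foo::Bar;
        auto lambda         = [](int x) { return x; };

        bool a = eastl::internal::is_null(fp);     // true  - null function pointer
        bool b = eastl::internal::is_null(mp);     // false - points at Foo::Bar
        bool c = eastl::internal::is_null(lambda); // false - non-pointer callables are never null
        (void)a; (void)b; (void)c;
    }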
--------------------------------------------------------------------------------
/include/EASTL/internal/in_place_t.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_INTERNAL_IN_PLACE_T_H
7 | #define EASTL_INTERNAL_IN_PLACE_T_H
8 |
9 |
10 | #include <EASTL/internal/config.h>
11 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
12 | #pragma once
13 | #endif
14 |
15 | namespace eastl
16 | {
17 | namespace Internal
18 | {
19 | struct in_place_tag {};
20 | template <class T> struct in_place_type_tag {};
21 | template <size_t N> struct in_place_index_tag {};
22 | }
23 |
24 | ///////////////////////////////////////////////////////////////////////////////
25 | /// in_place_tag
26 | ///
27 | /// http://en.cppreference.com/w/cpp/utility/in_place_tag
28 | ///
29 | struct in_place_tag
30 | {
31 | in_place_tag() = delete;
32 |
33 | private:
34 | explicit in_place_tag(Internal::in_place_tag) {}
35 | friend inline in_place_tag Internal_ConstructInPlaceTag();
36 | };
37 |
38 | // internal factory function for in_place_tag
39 | inline in_place_tag Internal_ConstructInPlaceTag() { return in_place_tag(Internal::in_place_tag{}); }
40 |
41 |
42 | ///////////////////////////////////////////////////////////////////////////////
43 | /// in_place_t / in_place_type_t / in_place_index_t
44 | ///
45 | /// used to disambiguate overloads that take arguments (possibly a parameter
46 | /// pack) for in-place construction of some value.
47 | ///
48 | /// http://en.cppreference.com/w/cpp/utility/optional/in_place_t
49 | ///
50 | using in_place_t = in_place_tag(&)(Internal::in_place_tag);
51 |
52 | template <class T>
53 | using in_place_type_t = in_place_tag(&)(Internal::in_place_type_tag<T>);
54 |
55 | template <size_t N>
56 | using in_place_index_t = in_place_tag(&)(Internal::in_place_index_tag<N>);
57 |
58 |
59 | ///////////////////////////////////////////////////////////////////////////////
60 | /// in_place / in_place<T> / in_place<N>
61 | ///
62 | /// http://en.cppreference.com/w/cpp/utility/in_place
63 | ///
64 | inline in_place_tag in_place(Internal::in_place_tag) { return Internal_ConstructInPlaceTag(); }
65 |
66 | template <class T>
67 | inline in_place_tag in_place(Internal::in_place_type_tag<T>) { return Internal_ConstructInPlaceTag(); }
68 |
69 | template <size_t N>
70 | inline in_place_tag in_place(Internal::in_place_index_tag<N>) { return Internal_ConstructInPlaceTag(); }
71 |
72 |
73 | } // namespace eastl
74 |
75 |
76 | #endif // Header include guard
77 |
78 |
79 |
80 |
81 |
82 |
83 |
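Because this header predates the C++17 tag types, in_place_t and friends are expressed as references to the overloaded in_place function; call sites nevertheless look the same as with the standard tags. A sketch of the usual use with eastl::optional (the string payload is purely illustrative):

    #include <EASTL/optional.h>
    #include <EASTL/string.h>

    void Example()
    {
        // Construct the contained string in place from (count, char) rather than copying a temporary.
        eastl::optional<eastl::string> s(eastl::in_place, size_t(5), 'x'); // holds "xxxxx"
    }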
--------------------------------------------------------------------------------
/include/EASTL/internal/integer_sequence.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////
4 |
5 | #ifndef EASTL_INTEGER_SEQUENCE_H
6 | #define EASTL_INTEGER_SEQUENCE_H
7 |
8 | #include <EABase/eabase.h>
9 | #include <EASTL/internal/config.h>
10 | #include <EASTL/type_traits.h>
11 |
12 | namespace eastl
13 | {
14 |
15 | #if EASTL_VARIADIC_TEMPLATES_ENABLED && !defined(EA_COMPILER_NO_TEMPLATE_ALIASES)
16 |
17 | // integer_sequence
18 | template <typename T, T... Ints>
19 | class integer_sequence
20 | {
21 | public:
22 | typedef T value_type;
23 | static_assert(is_integral<T>::value, "eastl::integer_sequence can only be instantiated with an integral type");
24 | static EA_CONSTEXPR size_t size() EA_NOEXCEPT { return sizeof...(Ints); }
25 | };
26 |
27 | template <size_t... Is>
28 | using index_sequence = integer_sequence<size_t, Is...>;
29 |
30 | #if (defined(EA_COMPILER_GNUC) && EA_COMPILER_VERSION >= 8001)
31 |
32 | template <typename T, T N>
33 | using make_integer_sequence = integer_sequence<T, __integer_pack(N)...>;
34 |
35 | #elif (defined(EA_COMPILER_CLANG) && EA_COMPILER_HAS_BUILTIN(__make_integer_seq)) || (defined(EA_COMPILER_MSVC) && (EA_COMPILER_VERSION >= 1910))
36 |
37 | template <typename T, T N>
38 | using make_integer_sequence = __make_integer_seq<integer_sequence, T, N>;
39 |
40 | #else
41 |
42 | template <size_t N, typename IndexSeq>
43 | struct make_index_sequence_impl;
44 |
45 | template <size_t N, size_t... Is>
46 | struct make_index_sequence_impl<N, integer_sequence<size_t, Is...>>
47 | {
48 | typedef typename make_index_sequence_impl<N - 1, integer_sequence<size_t, N - 1, Is...>>::type type;
49 | };
50 |
51 | template <size_t... Is>
52 | struct make_index_sequence_impl<0, integer_sequence<size_t, Is...>>
53 | {
54 | typedef integer_sequence<size_t, Is...> type;
55 | };
56 |
57 | template <typename Target, typename Seq>
58 | struct integer_sequence_convert_impl;
59 |
60 | template <typename Target, size_t... Is>
61 | struct integer_sequence_convert_impl<Target, integer_sequence<size_t, Is...>>
62 | {
63 | typedef integer_sequence<Target, Is...> type;
64 | };
65 |
66 | template <typename T, T N>
67 | struct make_integer_sequence_impl
68 | {
69 | typedef typename integer_sequence_convert_impl<T, typename make_index_sequence_impl<N, integer_sequence<size_t>>::type>::type type;
70 | };
71 |
72 | template <typename T, T N>
73 | using make_integer_sequence = typename make_integer_sequence_impl<T, N>::type;
74 |
75 | #endif
76 |
77 | template <size_t N>
78 | using make_index_sequence = make_integer_sequence<size_t, N>;
79 |
80 | // Helper alias template that converts any type parameter pack into an index sequence of the same length
81 | template <typename... T>
82 | using index_sequence_for = make_index_sequence<sizeof...(T)>;
83 |
84 | namespace internal
85 | {
86 |
87 | template <typename T>
88 | struct integer_sequence_size_helper;
89 |
90 | template <typename T, T... Ints>
91 | struct integer_sequence_size_helper<eastl::integer_sequence<T, Ints...>> : public integral_constant<size_t, sizeof...(Ints)>
92 | {
93 | };
94 |
95 | template <typename T>
96 | struct integer_sequence_size : public integer_sequence_size_helper<remove_cv_t<T>>
97 | {
98 | };
99 |
100 | template <typename T>
101 | struct index_sequence_size : public integer_sequence_size_helper<remove_cv_t<T>>
102 | {
103 | };
104 |
105 | template <typename T>
106 | EASTL_CPP17_INLINE_VARIABLE EA_CONSTEXPR size_t integer_sequence_size_v = integer_sequence_size<T>::value;
107 |
108 | template <typename T>
109 | EASTL_CPP17_INLINE_VARIABLE EA_CONSTEXPR size_t index_sequence_size_v = index_sequence_size<T>::value;
110 |
111 |
112 | } // namespace internal
113 |
114 | #endif // EASTL_VARIADIC_TEMPLATES_ENABLED
115 |
116 | } // namespace eastl
117 |
118 | #endif // EASTL_INTEGER_SEQUENCE_H
119 |
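make_index_sequence / index_sequence_for are the usual way to expand a parameter pack by position, for example to visit every element of a tuple. A minimal sketch (ForEachElement is a hypothetical helper; the fold expression assumes C++17):

    #include <EASTL/internal/integer_sequence.h>
    #include <EASTL/tuple.h>

    template <typename Tuple, typename Func, size_t... Is>
    void ForEachElementImpl(Tuple& t, Func f, eastl::index_sequence<Is...>)
    {
        (f(eastl::get<Is>(t)), ...); // call f on each element, in index order
    }

    template <typename... Ts, typename Func>
    void ForEachElement(eastl::tuple<Ts...>& t, Func f)
    {
        ForEachElementImpl(t, f, eastl::index_sequence_for<Ts...>{});
    }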
--------------------------------------------------------------------------------
/include/EASTL/internal/memory_base.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////
4 |
5 | #ifndef EASTL_INTERNAL_MEMORY_BASE_H
6 | #define EASTL_INTERNAL_MEMORY_BASE_H
7 |
8 | #include <EASTL/internal/config.h>
9 |
10 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
11 | #pragma once // Some compilers (e.g. VC++) benefit significantly from using this. We've measured 3-4% build speed improvements in apps as a result.
12 | #endif
13 |
14 |
15 | ////////////////////////////////////////////////////////////////////////////////////////////
16 | // This file contains basic functionality found in the standard library 'memory' header that
17 | // have limited or no dependencies. This allows us to utilize these functions
18 | // in other EASTL code while avoiding circular dependencies.
19 | ////////////////////////////////////////////////////////////////////////////////////////////
20 |
21 | namespace eastl
22 | {
23 | /// addressof
24 | ///
25 | /// From the C++11 Standard, section 20.6.12.1
26 | /// Returns the actual address of the object or function referenced by r, even in the presence of an overloaded operator&.
27 | ///
28 | template <typename T>
29 | T* addressof(T& value) EA_NOEXCEPT
30 | {
31 | return reinterpret_cast<T*>(&const_cast<char&>(reinterpret_cast<const volatile char&>(value)));
32 | }
33 |
34 | } // namespace eastl
35 |
36 | #endif // EASTL_INTERNAL_MEMORY_BASE_H
37 |
38 |
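addressof exists because operator& can be overloaded; the cast chain above recovers the object's true address regardless. A tiny sketch with a hypothetical type whose operator& is deliberately hijacked:

    #include <EASTL/internal/memory_base.h>

    struct Awkward
    {
        int value = 0;
        Awkward* operator&() { return nullptr; } // misbehaving address-of overload
    };

    void Example()
    {
        Awkward a;
        Awkward* p1 = &a;                  // nullptr - the overload wins
        Awkward* p2 = eastl::addressof(a); // the real address of a
        (void)p1; (void)p2;
    }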
--------------------------------------------------------------------------------
/include/EASTL/internal/pair_fwd_decls.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////
4 |
5 | #ifndef EASTL_PAIR_FWD_DECLS_H
6 | #define EASTL_PAIR_FWD_DECLS_H
7 |
8 | #include <EASTL/internal/config.h>
9 |
10 | namespace eastl
11 | {
12 | template <typename T1, typename T2>
13 | struct pair;
14 | }
15 |
16 | #endif // EASTL_PAIR_FWD_DECLS_H
17 |
--------------------------------------------------------------------------------
/include/EASTL/internal/piecewise_construct_t.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////
4 |
5 |
6 | #ifndef EASTL_INTERNAL_PIECEWISE_CONSTRUCT_T_H
7 | #define EASTL_INTERNAL_PIECEWISE_CONSTRUCT_T_H
8 |
9 |
10 | #include <EASTL/internal/config.h>
11 | #if defined(EA_PRAGMA_ONCE_SUPPORTED)
12 | #pragma once
13 | #endif
14 |
15 | namespace eastl
16 | {
17 | ///////////////////////////////////////////////////////////////////////////////
18 | /// piecewise_construct_t
19 | ///
20 | /// http://en.cppreference.com/w/cpp/utility/piecewise_construct_t
21 | ///
22 | struct piecewise_construct_t
23 | {
24 | explicit piecewise_construct_t() = default;
25 | };
26 |
27 |
28 | ///////////////////////////////////////////////////////////////////////////////
29 | /// piecewise_construct
30 | ///
31 | /// A tag type used to disambiguate between function overloads that take two tuple arguments.
32 | ///
33 | /// http://en.cppreference.com/w/cpp/utility/piecewise_construct
34 | ///
35 | EA_CONSTEXPR piecewise_construct_t piecewise_construct = eastl::piecewise_construct_t();
36 |
37 | } // namespace eastl
38 |
39 |
40 | #endif // Header include guard
41 |
42 |
43 |
44 |
45 |
46 |
47 |
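piecewise_construct lets eastl::pair (and the map/unordered_map emplace paths built on it) construct each member directly from its own argument tuple instead of from a temporary. A sketch, assuming eastl::pair and eastl::forward_as_tuple mirror their std counterparts:

    #include <EASTL/utility.h> // eastl::pair, eastl::piecewise_construct
    #include <EASTL/tuple.h>   // eastl::forward_as_tuple
    #include <EASTL/string.h>

    void Example()
    {
        // first is built from (3); second is built from (5, '*'). No temporary pair members are created.
        eastl::pair<int, eastl::string> p(eastl::piecewise_construct,
                                          eastl::forward_as_tuple(3),
                                          eastl::forward_as_tuple(size_t(5), '*'));
        (void)p;
    }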
--------------------------------------------------------------------------------
/include/EASTL/internal/tuple_fwd_decls.h:
--------------------------------------------------------------------------------
1 | /////////////////////////////////////////////////////////////////////////////
2 | // Copyright (c) Electronic Arts Inc. All rights reserved.
3 | /////////////////////////////////////////////////////////////////////////////
4 |
5 | #ifndef EASTL_TUPLE_FWD_DECLS_H
6 | #define EASTL_TUPLE_FWD_DECLS_H
7 |
8 | #include <EASTL/internal/config.h>
9 |
10 | #if EASTL_TUPLE_ENABLED
11 |
12 | namespace eastl
13 | {
14 | template <typename... Ts>
15 | class tuple;
16 |
17 | template <typename T>
18 | struct tuple_size;
19 |
20 | #if EASTL_VARIABLE_TEMPLATES_ENABLED
21 | template <typename T>
22 | EA_CONSTEXPR size_t tuple_size_v = tuple_size<T>::value;
23 | #endif
24 |
25 | template <size_t I, typename T>
26 | struct tuple_element;
27 |
28 | template <size_t I, typename T>
29 | using tuple_element_t = typename tuple_element<I, T>::type;
30 |
31 | template <typename T> struct is_lvalue_reference;
32 |
33 | template <bool B, typename T, typename F>
34 | struct conditional;
35 |
36 | template <typename T> struct add_lvalue_reference;
37 |
38 | template <typename T> struct remove_reference;
39 |
40 | // const typename for tuple_element_t, for when tuple or TupleImpl cannot itself be const
41 | template