├── .github ├── CONTRIBUTING.md ├── ISSUE_TEMPLATE.md ├── dependabot.yml └── workflows │ ├── ci.yml │ ├── docs.yml │ └── experimental.yml ├── .gitignore ├── .rspec ├── .yardopts ├── CHANGELOG.md ├── CODE_OF_CONDUCT.md ├── Gemfile ├── LICENSE.txt ├── README.md ├── Rakefile ├── concurrent-ruby-edge.gemspec ├── concurrent-ruby-ext.gemspec ├── concurrent-ruby.gemspec ├── docs-source ├── actor │ ├── celluloid_benchmark.rb │ ├── define.in.rb │ ├── define.out.rb │ ├── examples.in.rb │ ├── examples.out.rb │ ├── format.rb │ ├── init.rb │ ├── io.in.rb │ ├── io.out.rb │ ├── main.md │ ├── messaging.in.rb │ ├── messaging.out.rb │ ├── quick.in.rb │ ├── quick.out.rb │ ├── supervision_tree.in.rb │ └── supervision_tree.out.rb ├── cancellation.in.md ├── cancellation.init.rb ├── cancellation.out.md ├── channel.in.md ├── channel.init.rb ├── channel.md ├── channel.out.md ├── dataflow.md ├── dataflow_top_stock_calc.md ├── erlang_actor.in.md ├── erlang_actor.init.rb ├── erlang_actor.out.md ├── future.md ├── future.rb ├── images │ └── tvar │ │ ├── implementation-absolute.png │ │ ├── implementation-scalability.png │ │ ├── implementation-write-proportion-scalability.png │ │ ├── ruby-absolute.png │ │ └── ruby-scalability.png ├── logo │ ├── concurrent-ruby-logo-200x200.png │ ├── concurrent-ruby-logo-300x300.png │ ├── concurrent-ruby-logo-400x400.png │ └── concurrent-ruby-logo-930x930.png ├── medium-example.in.rb ├── medium-example.init.rb ├── medium-example.out.rb ├── promises-main.md ├── promises.in.md ├── promises.init.rb ├── promises.out.md ├── ruby-association-final-report.md ├── ruby-association-intermediate-report.md ├── signpost.md ├── synchronization-notes.md ├── synchronization.md ├── thread_pools.md ├── throttle.in.md ├── throttle.init.rb ├── throttle.out.md ├── top-stock-scala │ ├── .gitignore │ ├── README.md │ └── top-stock.rb └── tvar.md ├── examples ├── a-tour-of-go-channels │ ├── buffered-channels.rb │ ├── channels.rb │ ├── default-selection.rb │ ├── equivalent-binary-trees.rb │ ├── range-and-close.rb │ └── select.rb ├── actor_stress_test.rb ├── atomic_example.rb ├── benchmark_async.rb ├── benchmark_atomic.rb ├── benchmark_atomic_1.rb ├── benchmark_atomic_boolean.rb ├── benchmark_atomic_fixnum.rb ├── benchmark_map.rb ├── benchmark_new_futures.rb ├── benchmark_read_write_lock.rb ├── benchmark_structs.rb ├── format.rb ├── go-by-example-channels │ ├── channel-buffering.rb │ ├── channel-directions.rb │ ├── channel-synchronization.rb │ ├── channels.rb │ ├── closing-channels.rb │ ├── non-blocking-channel-operations.rb │ ├── range-over-channels.rb │ ├── rate-limiting.rb │ ├── select.rb │ ├── ticker.rb │ ├── timeouts.rb │ ├── timers.rb │ └── worker-pools.rb ├── graph_atomic_bench.rb ├── init.rb ├── stress_ruby_thread_pool.rb ├── thread_local_memory_usage.rb ├── thread_local_var_bench.rb └── who.rb ├── ext ├── concurrent-ruby-ext │ ├── atomic_boolean.c │ ├── atomic_boolean.h │ ├── atomic_fixnum.c │ ├── atomic_fixnum.h │ ├── atomic_reference.c │ ├── atomic_reference.h │ ├── extconf.rb │ └── rb_concurrent.c └── concurrent-ruby │ ├── ConcurrentRubyService.java │ └── com │ └── concurrent_ruby │ └── ext │ ├── AtomicReferenceLibrary.java │ ├── JRubyMapBackendLibrary.java │ ├── JavaAtomicBooleanLibrary.java │ ├── JavaAtomicFixnumLibrary.java │ ├── JavaSemaphoreLibrary.java │ ├── SynchronizationLibrary.java │ ├── jsr166e │ ├── ConcurrentHashMap.java │ ├── ConcurrentHashMapV8.java │ ├── LongAdder.java │ ├── Striped64.java │ └── nounsafe │ │ ├── ConcurrentHashMapV8.java │ │ ├── LongAdder.java │ │ └── Striped64.java │ └── 
jsr166y │ └── ThreadLocalRandom.java ├── lib ├── concurrent-ruby-edge │ ├── concurrent-edge.rb │ └── concurrent │ │ ├── actor.rb │ │ ├── actor │ │ ├── behaviour.rb │ │ ├── behaviour │ │ │ ├── abstract.rb │ │ │ ├── awaits.rb │ │ │ ├── buffer.rb │ │ │ ├── errors_on_unknown_message.rb │ │ │ ├── executes_context.rb │ │ │ ├── linking.rb │ │ │ ├── pausing.rb │ │ │ ├── removes_child.rb │ │ │ ├── sets_results.rb │ │ │ ├── supervising.rb │ │ │ └── termination.rb │ │ ├── context.rb │ │ ├── core.rb │ │ ├── default_dead_letter_handler.rb │ │ ├── envelope.rb │ │ ├── errors.rb │ │ ├── internal_delegations.rb │ │ ├── public_delegations.rb │ │ ├── reference.rb │ │ ├── root.rb │ │ ├── type_check.rb │ │ ├── utils.rb │ │ └── utils │ │ │ ├── ad_hoc.rb │ │ │ ├── balancer.rb │ │ │ ├── broadcast.rb │ │ │ └── pool.rb │ │ ├── channel.rb │ │ ├── channel │ │ ├── buffer.rb │ │ ├── buffer │ │ │ ├── base.rb │ │ │ ├── buffered.rb │ │ │ ├── dropping.rb │ │ │ ├── sliding.rb │ │ │ ├── ticker.rb │ │ │ ├── timer.rb │ │ │ └── unbuffered.rb │ │ ├── selector.rb │ │ ├── selector │ │ │ ├── after_clause.rb │ │ │ ├── default_clause.rb │ │ │ ├── error_clause.rb │ │ │ ├── put_clause.rb │ │ │ └── take_clause.rb │ │ └── tick.rb │ │ ├── edge.rb │ │ ├── edge │ │ ├── cancellation.rb │ │ ├── channel.rb │ │ ├── erlang_actor.rb │ │ ├── lock_free_linked_set.rb │ │ ├── lock_free_linked_set │ │ │ ├── node.rb │ │ │ └── window.rb │ │ ├── lock_free_queue.rb │ │ ├── old_channel_integration.rb │ │ ├── processing_actor.rb │ │ ├── promises.rb │ │ ├── throttle.rb │ │ └── version.rb │ │ ├── executor │ │ └── wrapping_executor.rb │ │ └── lazy_register.rb └── concurrent-ruby │ ├── concurrent-ruby.rb │ ├── concurrent.rb │ └── concurrent │ ├── .gitignore │ ├── agent.rb │ ├── array.rb │ ├── async.rb │ ├── atom.rb │ ├── atomic │ ├── atomic_boolean.rb │ ├── atomic_fixnum.rb │ ├── atomic_markable_reference.rb │ ├── atomic_reference.rb │ ├── count_down_latch.rb │ ├── cyclic_barrier.rb │ ├── event.rb │ ├── fiber_local_var.rb │ ├── java_count_down_latch.rb │ ├── locals.rb │ ├── lock_local_var.rb │ ├── mutex_atomic_boolean.rb │ ├── mutex_atomic_fixnum.rb │ ├── mutex_count_down_latch.rb │ ├── mutex_semaphore.rb │ ├── read_write_lock.rb │ ├── reentrant_read_write_lock.rb │ ├── semaphore.rb │ └── thread_local_var.rb │ ├── atomic_reference │ ├── atomic_direct_update.rb │ ├── mutex_atomic.rb │ └── numeric_cas_wrapper.rb │ ├── atomics.rb │ ├── collection │ ├── copy_on_notify_observer_set.rb │ ├── copy_on_write_observer_set.rb │ ├── java_non_concurrent_priority_queue.rb │ ├── lock_free_stack.rb │ ├── map │ │ ├── mri_map_backend.rb │ │ ├── non_concurrent_map_backend.rb │ │ ├── synchronized_map_backend.rb │ │ └── truffleruby_map_backend.rb │ ├── non_concurrent_priority_queue.rb │ └── ruby_non_concurrent_priority_queue.rb │ ├── concern │ ├── deprecation.rb │ ├── dereferenceable.rb │ ├── logging.rb │ ├── obligation.rb │ └── observable.rb │ ├── configuration.rb │ ├── constants.rb │ ├── dataflow.rb │ ├── delay.rb │ ├── errors.rb │ ├── exchanger.rb │ ├── executor │ ├── abstract_executor_service.rb │ ├── cached_thread_pool.rb │ ├── executor_service.rb │ ├── fixed_thread_pool.rb │ ├── immediate_executor.rb │ ├── indirect_immediate_executor.rb │ ├── java_executor_service.rb │ ├── java_single_thread_executor.rb │ ├── java_thread_pool_executor.rb │ ├── ruby_executor_service.rb │ ├── ruby_single_thread_executor.rb │ ├── ruby_thread_pool_executor.rb │ ├── safe_task_executor.rb │ ├── serial_executor_service.rb │ ├── serialized_execution.rb │ ├── serialized_execution_delegator.rb │ ├── 
simple_executor_service.rb │ ├── single_thread_executor.rb │ ├── thread_pool_executor.rb │ └── timer_set.rb │ ├── executors.rb │ ├── future.rb │ ├── hash.rb │ ├── immutable_struct.rb │ ├── ivar.rb │ ├── map.rb │ ├── maybe.rb │ ├── mutable_struct.rb │ ├── mvar.rb │ ├── options.rb │ ├── promise.rb │ ├── promises.rb │ ├── re_include.rb │ ├── scheduled_task.rb │ ├── set.rb │ ├── settable_struct.rb │ ├── synchronization.rb │ ├── synchronization │ ├── abstract_lockable_object.rb │ ├── abstract_object.rb │ ├── abstract_struct.rb │ ├── condition.rb │ ├── full_memory_barrier.rb │ ├── jruby_lockable_object.rb │ ├── lock.rb │ ├── lockable_object.rb │ ├── mutex_lockable_object.rb │ ├── object.rb │ ├── safe_initialization.rb │ └── volatile.rb │ ├── thread_safe │ ├── readme.txt │ ├── synchronized_delegator.rb │ ├── util.rb │ └── util │ │ ├── adder.rb │ │ ├── data_structures.rb │ │ ├── power_of_two_tuple.rb │ │ ├── striped64.rb │ │ ├── volatile.rb │ │ └── xor_shift_random.rb │ ├── timer_task.rb │ ├── tuple.rb │ ├── tvar.rb │ ├── utility │ ├── engine.rb │ ├── monotonic_time.rb │ ├── native_extension_loader.rb │ ├── native_integer.rb │ └── processor_counter.rb │ └── version.rb ├── spec ├── .gitignore ├── concurrent │ ├── .gitignore │ ├── actor_spec.rb │ ├── agent_spec.rb │ ├── array_spec.rb │ ├── async_spec.rb │ ├── atom_spec.rb │ ├── atomic │ │ ├── atomic_boolean_spec.rb │ │ ├── atomic_fixnum_spec.rb │ │ ├── atomic_markable_reference_spec.rb │ │ ├── atomic_reference_spec.rb │ │ ├── count_down_latch_spec.rb │ │ ├── cyclic_barrier_spec.rb │ │ ├── event_spec.rb │ │ ├── fiber_local_var_spec.rb │ │ ├── lock_local_var_spec.rb │ │ ├── read_write_lock_spec.rb │ │ ├── reentrant_read_write_lock_spec.rb │ │ ├── semaphore_spec.rb │ │ └── thread_local_var_spec.rb │ ├── cancellation_spec.rb │ ├── channel │ │ ├── buffer │ │ │ ├── base_shared.rb │ │ │ ├── base_spec.rb │ │ │ ├── buffered_shared.rb │ │ │ ├── buffered_spec.rb │ │ │ ├── dropping_spec.rb │ │ │ ├── sliding_spec.rb │ │ │ ├── ticker_spec.rb │ │ │ ├── timer_spec.rb │ │ │ ├── timing_buffer_shared.rb │ │ │ └── unbuffered_spec.rb │ │ ├── integration_spec.rb │ │ └── tick_spec.rb │ ├── channel_spec.rb │ ├── collection │ │ ├── copy_on_notify_observer_set_spec.rb │ │ ├── copy_on_write_observer_set_spec.rb │ │ ├── non_concurrent_priority_queue_spec.rb │ │ └── observer_set_shared.rb │ ├── collection_each_shared.rb │ ├── concern │ │ ├── dereferenceable_shared.rb │ │ ├── obligation_shared.rb │ │ ├── obligation_spec.rb │ │ ├── observable_shared.rb │ │ └── observable_spec.rb │ ├── configuration_spec.rb │ ├── dataflow_spec.rb │ ├── delay_spec.rb │ ├── edge │ │ ├── channel_spec.rb │ │ ├── erlang_actor_spec.rb │ │ └── lock_free_linked_set_spec.rb │ ├── exchanger_spec.rb │ ├── executor │ │ ├── cached_thread_pool_spec.rb │ │ ├── executor_quits.rb │ │ ├── executor_service_shared.rb │ │ ├── fixed_thread_pool_spec.rb │ │ ├── global_thread_pool_shared.rb │ │ ├── immediate_executor_spec.rb │ │ ├── indirect_immediate_executor_spec.rb │ │ ├── java_single_thread_executor_spec.rb │ │ ├── java_thread_pool_executor_spec.rb │ │ ├── ruby_single_thread_executor_spec.rb │ │ ├── ruby_thread_pool_executor_spec.rb │ │ ├── safe_task_executor_spec.rb │ │ ├── serialized_execution_spec.rb │ │ ├── simple_executor_service_spec.rb │ │ ├── thread_pool_class_cast_spec.rb │ │ ├── thread_pool_executor_shared.rb │ │ ├── thread_pool_shared.rb │ │ ├── timer_set_spec.rb │ │ └── wrapping_executor_spec.rb │ ├── future_spec.rb │ ├── hash_spec.rb │ ├── immutable_struct_spec.rb │ ├── ivar_shared.rb │ ├── ivar_spec.rb 
│ ├── lazy_register_spec.rb │ ├── map_spec.rb │ ├── maybe_spec.rb │ ├── monotonic_time_spec.rb │ ├── mutable_struct_spec.rb │ ├── mvar_spec.rb │ ├── no_concurrent_files_loaded_before_spec.rb │ ├── options_spec.rb │ ├── processing_actor_spec.rb │ ├── promise_spec.rb │ ├── promises_spec.rb │ ├── require_all_files_separately.rb │ ├── scheduled_task_spec.rb │ ├── set_spec.rb │ ├── settable_struct_spec.rb │ ├── struct_shared.rb │ ├── synchronization_spec.rb │ ├── thread_arguments_shared.rb │ ├── thread_safe │ │ ├── map_loops_spec.rb │ │ ├── no_unsafe_spec.rb │ │ └── synchronized_delegator_spec.rb │ ├── throttle_spec.rb │ ├── timer_task_spec.rb │ ├── tvar_spec.rb │ └── utility │ │ └── processor_count_spec.rb ├── spec_helper.rb └── support │ ├── .gitignore │ ├── example_group_extensions.rb │ └── threadsafe_test.rb ├── support ├── generate_docs.rb └── yard_full_types.rb └── yard-template └── default ├── fulldoc └── html │ └── css │ └── common.css ├── layout └── html │ ├── footer.erb │ └── objects.erb └── module └── setup.rb /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | ``` 6 | * Operating system: linux / mac / win 7 | * Ruby implementation: Ruby / JRuby / TruffleRuby 8 | * `concurrent-ruby` version: x.y.z 9 | * `concurrent-ruby-ext` installed: yes / no 10 | * `concurrent-ruby-edge` used: yes / no 11 | ``` 12 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "weekly" 7 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | on: [push, pull_request] 3 | concurrency: 4 | group: ${{ github.ref }} 5 | cancel-in-progress: true 6 | 7 | jobs: 8 | build: 9 | name: "Tests: Ruby ${{ matrix.ruby }} - ${{ matrix.os }}" 10 | runs-on: ${{ matrix.os }}-latest 11 | timeout-minutes: 10 12 | 13 | strategy: 14 | fail-fast: false 15 | matrix: 16 | ruby: [2.3, 2.4, 2.5, 2.6, 2.7, '3.0', 3.1, 3.2, 3.3, 3.4, jruby, truffleruby] 17 | os: [ubuntu] 18 | include: 19 | - ruby: ruby 20 | os: windows 21 | 22 | env: 23 | JAVA_OPTS: '-Xmx1024m' 24 | RUBYOPT: '-w' 25 | JRUBY_OPTS: '--dev' 26 | 27 | steps: 28 | - name: Clone Repo 29 | uses: actions/checkout@v4 30 | - name: Setup Ruby ${{ matrix.ruby }} 31 | uses: ruby/setup-ruby@v1 32 | with: 33 | ruby-version: ${{ matrix.ruby }} 34 | bundler-cache: true 35 | - name: Run tests 36 | run: bundle exec rake ci 37 | 38 | no-extensions: 39 | name: "Test without C extension" 40 | runs-on: ubuntu-latest 41 | timeout-minutes: 10 42 | env: 43 | RUBYOPT: '-w' 44 | steps: 45 | - uses: actions/checkout@v4 46 | - uses: ruby/setup-ruby@v1 47 | with: 48 | ruby-version: ruby 49 | bundler-cache: true 50 | - name: Run tests 51 | run: bundle exec rake spec:ci 52 | 53 | isolated: 54 | name: "Test isolated" 55 | runs-on: ubuntu-latest 56 | timeout-minutes: 10 57 | strategy: 58 | fail-fast: false 59 | matrix: 60 | ruby: [ 2.3, ruby ] # oldest and latest CRuby 61 | env: 62 | RUBYOPT: '-w' 63 | steps: 64 | - uses: actions/checkout@v4 65 | - uses: ruby/setup-ruby@v1 66 | with: 67 | ruby-version: ${{ matrix.ruby }} 68 | bundler-cache: true 69 | - run: bundle exec rake compile 70 | - run: bundle exec rake spec:isolated 71 | 
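The issue template above asks reporters for their environment details (operating system, Ruby implementation, and gem versions). As a small, hypothetical helper — not part of the repository — the requested values could be collected for pasting into a report like this:

```ruby
require 'rbconfig'
require 'concurrent'

# Hypothetical helper: prints the environment details requested by
# .github/ISSUE_TEMPLATE.md so they can be pasted into a bug report.
puts <<~INFO
  * Operating system: #{RbConfig::CONFIG['host_os']}
  * Ruby implementation: #{RUBY_ENGINE} #{RUBY_VERSION}
  * `concurrent-ruby` version: #{Concurrent::VERSION}
INFO
```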
-------------------------------------------------------------------------------- /.github/workflows/docs.yml: -------------------------------------------------------------------------------- 1 | name: Publish Docs to GitHub Pages 2 | on: 3 | push: 4 | branches: [master] 5 | workflow_dispatch: 6 | 7 | permissions: 8 | contents: read 9 | 10 | # Allow one concurrent deployment 11 | concurrency: 12 | group: "pages" 13 | cancel-in-progress: true 14 | 15 | jobs: 16 | html: 17 | runs-on: ubuntu-latest 18 | env: 19 | BUNDLE_WITH: "documentation" 20 | steps: 21 | - uses: actions/checkout@v4 22 | with: 23 | fetch-depth: 0 24 | - uses: ruby/setup-ruby@v1 25 | with: 26 | ruby-version: 2.6 27 | bundler-cache: true 28 | 29 | - run: ruby support/generate_docs.rb 30 | 31 | - name: Upload artifact 32 | uses: actions/upload-pages-artifact@v3 33 | with: 34 | path: docs 35 | 36 | deploy: 37 | permissions: 38 | pages: write 39 | id-token: write 40 | needs: [html] 41 | environment: 42 | name: github-pages 43 | url: ${{ steps.deployment.outputs.page_url }} 44 | runs-on: ubuntu-latest 45 | steps: 46 | - name: Deploy to GitHub Pages 47 | id: deployment 48 | uses: actions/deploy-pages@v4 49 | -------------------------------------------------------------------------------- /.github/workflows/experimental.yml: -------------------------------------------------------------------------------- 1 | name: Experimental Rubies CI Run 2 | on: 3 | schedule: 4 | - cron: '0 0 * * *' # Runs every day at midnight 5 | workflow_dispatch: 6 | 7 | jobs: 8 | build: 9 | runs-on: ubuntu-latest 10 | timeout-minutes: 10 11 | 12 | strategy: 13 | matrix: 14 | ruby: [head, jruby-head, truffleruby-head] 15 | 16 | env: 17 | JAVA_OPTS: '-Xmx1024m' 18 | RUBYOPT: '-w' 19 | JRUBY_OPTS: '--dev' 20 | 21 | name: "Tests: Experimental Ruby ${{ matrix.ruby }}" 22 | steps: 23 | - name: Clone Repo 24 | uses: actions/checkout@v4 25 | - name: Setup Ruby ${{ matrix.ruby }} 26 | uses: ruby/setup-ruby@v1 27 | with: 28 | ruby-version: ${{ matrix.ruby }} 29 | bundler-cache: true 30 | - name: Run tests 31 | run: bundle exec rake ci 32 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Project 2 | /Gemfile.lock 3 | /.bundle 4 | /tmp/* 5 | /coverage 6 | /pkg 7 | 8 | # Yard documentation 9 | /.yardoc 10 | 11 | # IDEs' files 12 | *.iml 13 | *.tmproj 14 | .idea 15 | 16 | # Local Ruby settings 17 | /.rspec-local 18 | /.rvmrc 19 | /.ruby-version 20 | /.ruby-gemset 21 | /vendor 22 | 23 | # junk 24 | .DS_Store 25 | .githubtoken 26 | 27 | # Rspec created files 28 | /spec/examples.txt 29 | 30 | # Compiled files 31 | /lib/concurrent-ruby/concurrent/concurrent_ruby.jar 32 | /lib/concurrent-ruby/concurrent/**/concurrent_ruby_ext.* 33 | /lib/concurrent-ruby/concurrent/concurrent_ruby_ext.* 34 | -------------------------------------------------------------------------------- /.rspec: -------------------------------------------------------------------------------- 1 | -I lib-edge 2 | --require spec_helper 3 | --color 4 | --warnings 5 | --format documentation 6 | -------------------------------------------------------------------------------- /.yardopts: -------------------------------------------------------------------------------- 1 | --error:" use `bundle exec rake yard` instead" 2 | --output-dir tmp 3 | -- 4 | no-lib 5 | - 6 | -------------------------------------------------------------------------------- /Gemfile: 
-------------------------------------------------------------------------------- 1 | source 'https://rubygems.org' 2 | 3 | version = File.read("#{__dir__}/lib/concurrent-ruby/concurrent/version.rb")[/'(.+)'/, 1] or raise 4 | edge_version = File.read("#{__dir__}/lib/concurrent-ruby-edge/concurrent/edge/version.rb")[/'(.+)'/, 1] or raise 5 | 6 | no_path = ENV['NO_PATH'] 7 | options = no_path ? {} : { path: '.' } 8 | 9 | gem 'concurrent-ruby', version, options 10 | gem 'concurrent-ruby-edge', edge_version, options 11 | gem 'concurrent-ruby-ext', version, options.merge(platform: :mri) 12 | 13 | group :development do 14 | gem 'rake', '~> 13.0' 15 | gem 'rake-compiler', '~> 1.0', '>= 1.0.7', '!= 1.2.4' 16 | gem 'rake-compiler-dock', '~> 1.0' 17 | gem 'pry', '~> 0.11', platforms: :mri 18 | end 19 | 20 | group :documentation, optional: true do 21 | gem 'yard', '~> 0.9.0', require: false 22 | gem 'redcarpet', '~> 3.0', platforms: :mri # understands github markdown 23 | gem 'md-ruby-eval', '~> 0.6' 24 | end 25 | 26 | group :testing do 27 | gem 'rspec', '~> 3.7' 28 | gem 'timecop', '~> 0.9' 29 | gem 'sigdump', require: false 30 | end 31 | 32 | # made opt-in since it will not install on jruby 1.7 33 | group :coverage, optional: !ENV['COVERAGE'] do 34 | gem 'simplecov', '~> 0.16.0', require: false 35 | gem 'coveralls', '~> 0.8.2', require: false 36 | end 37 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) Jerry D'Antonio -- released under the MIT license. 2 | 3 | http://www.opensource.org/licenses/mit-license.php 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /concurrent-ruby-edge.gemspec: -------------------------------------------------------------------------------- 1 | version = File.read("#{__dir__}/lib/concurrent-ruby/concurrent/version.rb")[/'(.+)'/, 1] or raise 2 | edge_version = File.read("#{__dir__}/lib/concurrent-ruby-edge/concurrent/edge/version.rb")[/'(.+)'/, 1] or raise 3 | 4 | Gem::Specification.new do |s| 5 | git_files = `git ls-files`.split("\n") 6 | 7 | s.name = 'concurrent-ruby-edge' 8 | s.version = edge_version 9 | s.platform = Gem::Platform::RUBY 10 | s.authors = ["Jerry D'Antonio", 'Petr Chalupa', 'The Ruby Concurrency Team'] 11 | s.email = 'concurrent-ruby@googlegroups.com' 12 | s.homepage = 'http://www.concurrent-ruby.com' 13 | s.summary = 'Edge features and additions to the concurrent-ruby gem.' 14 | s.license = 'MIT' 15 | s.date = Time.now.strftime('%Y-%m-%d') 16 | s.files = Dir['lib/concurrent-ruby-edge/**/*.rb'] & git_files 17 | s.extra_rdoc_files = Dir['README*', 'LICENSE*', 'CHANGELOG*'] 18 | s.require_paths = ['lib/concurrent-ruby-edge'] 19 | s.description = <<-TXT 20 | These features are under active development and may change frequently. They are expected not to 21 | keep backward compatibility (there may also lack tests and documentation). Semantic versions will 22 | be obeyed though. Features developed in `concurrent-ruby-edge` are expected to move to `concurrent-ruby` when final. 23 | Please see http://concurrent-ruby.com for more information. 24 | TXT 25 | 26 | s.required_ruby_version = '>= 2.3' 27 | 28 | s.add_runtime_dependency 'concurrent-ruby', "~> #{version.split('.')[0..1].join('.')}" 29 | end 30 | -------------------------------------------------------------------------------- /concurrent-ruby-ext.gemspec: -------------------------------------------------------------------------------- 1 | version = File.read("#{__dir__}/lib/concurrent-ruby/concurrent/version.rb")[/'(.+)'/, 1] or raise 2 | 3 | Gem::Specification.new do |s| 4 | s.name = 'concurrent-ruby-ext' 5 | s.version = version 6 | s.platform = Gem::Platform::RUBY 7 | s.authors = ["Jerry D'Antonio", 'The Ruby Concurrency Team'] 8 | s.email = 'concurrent-ruby@googlegroups.com' 9 | s.homepage = 'http://www.concurrent-ruby.com' 10 | s.summary = 'C extensions to optimize concurrent-ruby under MRI.' 11 | s.license = 'MIT' 12 | s.date = Time.now.strftime('%Y-%m-%d') 13 | 14 | s.description = <<-EOF 15 | C extensions to optimize the concurrent-ruby gem when running under MRI. 16 | Please see http://concurrent-ruby.com for more information. 
17 | EOF 18 | 19 | s.files = Dir['ext/**/*.{h,c,cpp}'] 20 | s.extra_rdoc_files = Dir['README*', 'LICENSE*', 'CHANGELOG*'] 21 | s.require_paths = ['lib'] 22 | s.extensions = 'ext/concurrent-ruby-ext/extconf.rb' 23 | 24 | s.required_ruby_version = '>= 2.3' 25 | 26 | s.add_runtime_dependency 'concurrent-ruby', "= #{version}" 27 | end 28 | -------------------------------------------------------------------------------- /concurrent-ruby.gemspec: -------------------------------------------------------------------------------- 1 | version = File.read("#{__dir__}/lib/concurrent-ruby/concurrent/version.rb")[/'(.+)'/, 1] or raise 2 | 3 | Gem::Specification.new do |s| 4 | git_files = `git ls-files`.split("\n") 5 | 6 | s.name = 'concurrent-ruby' 7 | s.version = version 8 | s.platform = Gem::Platform::RUBY 9 | s.authors = ["Jerry D'Antonio", 'Petr Chalupa', 'The Ruby Concurrency Team'] 10 | s.email = 'concurrent-ruby@googlegroups.com' 11 | s.homepage = 'http://www.concurrent-ruby.com' 12 | s.summary = 'Modern concurrency tools for Ruby. Inspired by Erlang, Clojure, Scala, Haskell, F#, C#, Java, and classic concurrency patterns.' 13 | s.license = 'MIT' 14 | s.date = Time.now.strftime('%Y-%m-%d') 15 | s.files = [*Dir['lib/concurrent-ruby/**/*.rb'] & git_files, 16 | *Dir['ext/concurrent-ruby/**/*'] & git_files, 17 | 'Rakefile', 18 | 'Gemfile', 19 | 'lib/concurrent-ruby/concurrent/concurrent_ruby.jar' 20 | ] 21 | s.extra_rdoc_files = Dir['README*', 'LICENSE*', 'CHANGELOG*'] 22 | s.require_paths = ['lib/concurrent-ruby'] 23 | s.description = <<-TXT.gsub(/^ +/, '') 24 | Modern concurrency tools including agents, futures, promises, thread pools, actors, supervisors, and more. 25 | Inspired by Erlang, Clojure, Go, JavaScript, actors, and classic concurrency patterns. 26 | TXT 27 | s.metadata["source_code_uri"] = "https://github.com/ruby-concurrency/concurrent-ruby" 28 | s.metadata["changelog_uri"] = "https://github.com/ruby-concurrency/concurrent-ruby/blob/master/CHANGELOG.md" 29 | s.required_ruby_version = '>= 2.3' 30 | end 31 | -------------------------------------------------------------------------------- /docs-source/actor/define.in.rb: -------------------------------------------------------------------------------- 1 | Message = Struct.new :action, :value # 2 | 3 | class AnActor < Concurrent::Actor::RestartingContext 4 | def initialize(init) 5 | @counter = init 6 | end 7 | 8 | # override #on_message to define actor's behaviour on message received 9 | def on_message(message) 10 | case message.action 11 | when :add 12 | @counter = @counter + message.value 13 | when :subtract 14 | @counter = @counter - message.value 15 | when :value 16 | @counter 17 | else 18 | pass 19 | end 20 | end 21 | 22 | # set counter to zero when there is an error 23 | def on_event(event) 24 | if event == :reset 25 | @counter = 0 # ignore initial value 26 | end 27 | end 28 | end # 29 | 30 | an_actor = AnActor.spawn name: 'an_actor', args: 10 # 31 | an_actor << Message.new(:add, 1) << Message.new(:subtract, 2) # 32 | an_actor.ask!(Message.new(:value, nil)) 33 | an_actor << :boo << Message.new(:add, 1) # 34 | an_actor.ask!(Message.new(:value, nil)) 35 | an_actor << :terminate! 
36 | 37 | -------------------------------------------------------------------------------- /docs-source/actor/define.out.rb: -------------------------------------------------------------------------------- 1 | Message = Struct.new :action, :value 2 | 3 | class AnActor < Concurrent::Actor::RestartingContext 4 | def initialize(init) 5 | @counter = init 6 | end 7 | 8 | # override #on_message to define actor's behaviour on message received 9 | def on_message(message) 10 | case message.action 11 | when :add 12 | @counter = @counter + message.value 13 | when :subtract 14 | @counter = @counter - message.value 15 | when :value 16 | @counter 17 | else 18 | pass 19 | end 20 | end 21 | 22 | # set counter to zero when there is an error 23 | def on_event(event) 24 | if event == :reset 25 | @counter = 0 # ignore initial value 26 | end 27 | end 28 | end 29 | 30 | an_actor = AnActor.spawn name: 'an_actor', args: 10 31 | an_actor << Message.new(:add, 1) << Message.new(:subtract, 2) 32 | an_actor.ask!(Message.new(:value, nil)) # => 9 33 | an_actor << :boo << Message.new(:add, 1) 34 | an_actor.ask!(Message.new(:value, nil)) # => 1 35 | an_actor << :terminate! 36 | # => # 37 | 38 | -------------------------------------------------------------------------------- /docs-source/actor/examples.in.rb: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /docs-source/actor/examples.out.rb: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /docs-source/actor/format.rb: -------------------------------------------------------------------------------- 1 | require 'rubygems' 2 | require 'bundler/setup' 3 | require 'pry' 4 | require 'pp' 5 | 6 | root = File.dirname(File.expand_path(Process.argv0)) 7 | input_paths = if ARGV.empty? 8 | Dir.glob("#{root}/*.in.rb") 9 | else 10 | ARGV 11 | end.map { |p| File.expand_path p } 12 | 13 | input_paths.each_with_index do |input_path, i| 14 | 15 | pid = fork do 16 | require_relative 'init' 17 | 18 | begin 19 | output_path = input_path.gsub /\.in\.rb$/, '.out.rb' 20 | input = File.readlines(input_path) 21 | 22 | chunks = [] 23 | line = '' 24 | 25 | while !input.empty? 26 | line += input.shift 27 | if Pry::Code.complete_expression? line 28 | chunks << line 29 | line = '' 30 | end 31 | end 32 | 33 | raise unless line.empty? 34 | 35 | chunks.map! { |chunk| [chunk, [chunk.split($/).size, 1].max] } 36 | environment = Module.new.send :binding 37 | evaluate = ->(code, line) do 38 | eval(code, environment, input_path, line) 39 | end 40 | 41 | indent = 50 42 | 43 | line_count = 1 44 | output = '' 45 | chunks.each do |chunk, lines| 46 | result = evaluate.(chunk, line_count) 47 | unless chunk.strip.empty? 
|| chunk =~ /\A *#/ 48 | pre_lines = chunk.lines.to_a 49 | last_line = pre_lines.pop 50 | output << pre_lines.join 51 | 52 | if last_line =~ /\#$/ 53 | output << last_line.gsub(/\#$/, '') 54 | else 55 | if last_line.size < indent && result.inspect.size < indent 56 | output << "%-#{indent}s %s" % [last_line.chomp, "# => #{result.inspect}\n"] 57 | else 58 | output << last_line << " # => #{result.inspect}\n" 59 | end 60 | end 61 | else 62 | output << chunk 63 | end 64 | line_count += lines 65 | end 66 | 67 | puts "#{input_path}\n -> #{output_path}" 68 | #puts output 69 | File.write(output_path, output) 70 | rescue => ex 71 | puts "#{ex} (#{ex.class})\n#{ex.backtrace * "\n"}" 72 | end 73 | end 74 | 75 | Process.wait pid 76 | end 77 | -------------------------------------------------------------------------------- /docs-source/actor/init.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent' 2 | require 'concurrent-edge' 3 | -------------------------------------------------------------------------------- /docs-source/actor/io.in.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent' 2 | 3 | # Concurrent.use_simple_logger(:WARN, STDOUT) 4 | 5 | # First option is to use operation pool 6 | 7 | class ActorDoingIO < Concurrent::Actor::RestartingContext 8 | def on_message(message) 9 | # do IO operation 10 | end 11 | 12 | def default_executor 13 | Concurrent.global_io_executor 14 | end 15 | end # 16 | 17 | actor_doing_io = ActorDoingIO.spawn :actor_doing_io 18 | actor_doing_io.executor == Concurrent.global_io_executor 19 | 20 | # It can be also built into a pool so there is not too many IO operations 21 | 22 | class IOWorker < Concurrent::Actor::Context 23 | def on_message(io_job) 24 | # do IO work 25 | sleep 0.1 26 | puts "#{path} second:#{(Time.now.to_f*100).floor} message:#{io_job}" 27 | end 28 | 29 | def default_executor 30 | Concurrent.global_io_executor 31 | end 32 | end # 33 | 34 | pool = Concurrent::Actor::Utils::Pool.spawn('pool', 2) do |index| 35 | IOWorker.spawn(name: "worker-#{index}") 36 | end 37 | 38 | pool << 1 << 2 << 3 << 4 << 5 << 6 39 | 40 | # prints two lines each second 41 | # /pool/worker-0 second:1414677666 message:1 42 | # /pool/worker-1 second:1414677666 message:2 43 | # /pool/worker-0 second:1414677667 message:3 44 | # /pool/worker-1 second:1414677667 message:4 45 | # /pool/worker-0 second:1414677668 message:5 46 | # /pool/worker-1 second:1414677668 message:6 47 | 48 | sleep 1 49 | -------------------------------------------------------------------------------- /docs-source/actor/io.out.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent' # => false 2 | 3 | # Concurrent.use_simple_logger(:WARN, STDOUT) 4 | 5 | # First option is to use operation pool 6 | 7 | class ActorDoingIO < Concurrent::Actor::RestartingContext 8 | def on_message(message) 9 | # do IO operation 10 | end 11 | 12 | def default_executor 13 | Concurrent.global_io_executor 14 | end 15 | end 16 | 17 | actor_doing_io = ActorDoingIO.spawn :actor_doing_io 18 | # => # 19 | actor_doing_io.executor == Concurrent.global_io_executor 20 | # => true 21 | 22 | # It can be also built into a pool so there is not too many IO operations 23 | 24 | class IOWorker < Concurrent::Actor::Context 25 | def on_message(io_job) 26 | # do IO work 27 | sleep 0.1 28 | puts "#{path} second:#{(Time.now.to_f*100).floor} message:#{io_job}" 29 | end 30 | 31 | def default_executor 32 
| Concurrent.global_io_executor 33 | end 34 | end 35 | 36 | pool = Concurrent::Actor::Utils::Pool.spawn('pool', 2) do |index| 37 | IOWorker.spawn(name: "worker-#{index}") 38 | end 39 | # => # 40 | 41 | pool << 1 << 2 << 3 << 4 << 5 << 6 42 | # => # 43 | 44 | # prints two lines each second 45 | # /pool/worker-0 second:1414677666 message:1 46 | # /pool/worker-1 second:1414677666 message:2 47 | # /pool/worker-0 second:1414677667 message:3 48 | # /pool/worker-1 second:1414677667 message:4 49 | # /pool/worker-0 second:1414677668 message:5 50 | # /pool/worker-1 second:1414677668 message:6 51 | 52 | sleep 1 # => 1 53 | -------------------------------------------------------------------------------- /docs-source/actor/messaging.in.rb: -------------------------------------------------------------------------------- 1 | require 'algebrick' 2 | 3 | # Actor message protocol definition with Algebrick 4 | Protocol = Algebrick.type do 5 | variants Add = type { fields! a: Numeric, b: Numeric }, 6 | Subtract = type { fields! a: Numeric, b: Numeric } 7 | end 8 | 9 | class Calculator < Concurrent::Actor::RestartingContext 10 | include Algebrick::Matching 11 | 12 | def on_message(message) 13 | # pattern matching on the message with deconstruction 14 | # ~ marks values which are passed to the block 15 | match message, 16 | (on Add.(~any, ~any) do |a, b| 17 | a + b 18 | end), 19 | # or using multi-assignment 20 | (on ~Subtract do |(a, b)| 21 | a - b 22 | end) 23 | end 24 | end # 25 | 26 | calculator = Calculator.spawn('calculator') 27 | addition = calculator.ask Add[1, 2] 28 | subtraction = calculator.ask Subtract[1, 0.5] 29 | results = (addition & subtraction) 30 | results.value! 31 | 32 | calculator.ask! :terminate! 33 | -------------------------------------------------------------------------------- /docs-source/actor/messaging.out.rb: -------------------------------------------------------------------------------- 1 | require 'algebrick' # => true 2 | 3 | # Actor message protocol definition with Algebrick 4 | Protocol = Algebrick.type do 5 | variants Add = type { fields! a: Numeric, b: Numeric }, 6 | Subtract = type { fields! a: Numeric, b: Numeric } 7 | end # => Protocol(Add | Subtract) 8 | 9 | class Calculator < Concurrent::Actor::RestartingContext 10 | include Algebrick::Matching 11 | 12 | def on_message(message) 13 | # pattern matching on the message with deconstruction 14 | # ~ marks values which are passed to the block 15 | match message, 16 | (on Add.(~any, ~any) do |a, b| 17 | a + b 18 | end), 19 | # or using multi-assignment 20 | (on ~Subtract do |(a, b)| 21 | a - b 22 | end) 23 | end 24 | end 25 | 26 | calculator = Calculator.spawn('calculator') 27 | # => # 28 | addition = calculator.ask Add[1, 2] 29 | # => <#Concurrent::Promises::Future:0x7fbedc05f7b0 pending> 30 | subtraction = calculator.ask Subtract[1, 0.5] 31 | # => <#Concurrent::Promises::Future:0x7fbedd891388 pending> 32 | results = (addition & subtraction) 33 | # => <#Concurrent::Promises::Future:0x7fbedc04eeb0 pending> 34 | results.value! # => [3, 0.5] 35 | 36 | calculator.ask! :terminate! 
# => true 37 | -------------------------------------------------------------------------------- /docs-source/actor/quick.in.rb: -------------------------------------------------------------------------------- 1 | class Adder < Concurrent::Actor::RestartingContext 2 | def initialize(init) 3 | @count = init 4 | end 5 | 6 | def on_message(message) 7 | case message 8 | when :add 9 | @count += 1 10 | else 11 | # pass to ErrorsOnUnknownMessage behaviour, which will just fail 12 | pass 13 | end 14 | end 15 | end # 16 | 17 | # `link: true` makes the actor linked to root actor and supervised 18 | # which is default behavior 19 | adder = Adder.spawn(name: :adder, link: true, args: [1]) 20 | adder.parent 21 | 22 | # tell and forget 23 | adder.tell(:add).tell(:add) 24 | # ask to get result 25 | adder.ask!(:add) 26 | # fail the actor 27 | adder.ask!(:bad) rescue $! 28 | # actor is restarted with initial values 29 | adder.ask!(:add) 30 | adder.ask!(:terminate!) 31 | -------------------------------------------------------------------------------- /docs-source/actor/quick.out.rb: -------------------------------------------------------------------------------- 1 | class Adder < Concurrent::Actor::RestartingContext 2 | def initialize(init) 3 | @count = init 4 | end 5 | 6 | def on_message(message) 7 | case message 8 | when :add 9 | @count += 1 10 | else 11 | # pass to ErrorsOnUnknownMessage behaviour, which will just fail 12 | pass 13 | end 14 | end 15 | end 16 | 17 | # `link: true` makes the actor linked to root actor and supervised 18 | # which is default behavior 19 | adder = Adder.spawn(name: :adder, link: true, args: [1]) 20 | # => # 21 | adder.parent 22 | # => # 23 | 24 | # tell and forget 25 | adder.tell(:add).tell(:add) 26 | # => # 27 | # ask to get result 28 | adder.ask!(:add) # => 4 29 | # fail the actor 30 | adder.ask!(:bad) rescue $! 31 | # => #> 32 | # actor is restarted with initial values 33 | adder.ask!(:add) # => 2 34 | adder.ask!(:terminate!) # => true 35 | -------------------------------------------------------------------------------- /docs-source/actor/supervision_tree.in.rb: -------------------------------------------------------------------------------- 1 | 2 | class Master < Concurrent::Actor::RestartingContext 3 | def initialize 4 | # create listener a supervised child of master 5 | @listener = Listener.spawn(name: 'listener1', supervise: true) 6 | end 7 | 8 | def on_message(msg) 9 | command, *args = msg 10 | case command 11 | when :listener 12 | @listener 13 | when :reset, :terminated, :resumed, :paused 14 | log(DEBUG) { " got #{msg} from #{envelope.sender}"} 15 | else 16 | pass 17 | end 18 | end 19 | 20 | # TODO this should be a part of a behaviour, it ensures that children are restarted/paused etc. when theirs parents are 21 | def on_event(event) 22 | event_name, _ = event 23 | case event_name 24 | when :resetting, :restarting 25 | @listener << :terminate! 26 | when Exception, :paused 27 | @listener << :pause! 28 | when :resumed 29 | @listener << :resume! 
30 | end 31 | end 32 | end # 33 | 34 | class Listener < Concurrent::Actor::RestartingContext 35 | def initialize 36 | @number = (rand() * 100).to_i 37 | end 38 | 39 | def on_message(msg) 40 | case msg 41 | when :number 42 | @number 43 | else 44 | pass 45 | end 46 | end 47 | 48 | end # 49 | 50 | master = Master.spawn(name: 'master', supervise: true) 51 | listener = master.ask!(:listener) 52 | listener.ask!(:number) 53 | # crash the listener which is supervised by master, it's restarted automatically reporting a different number 54 | listener.tell(:crash) 55 | listener.ask!(:number) 56 | 57 | master << :crash 58 | 59 | sleep 0.1 60 | 61 | # ask for listener again, old one is terminated with master and replaced with new one 62 | listener.ask!(:terminated?) 63 | listener = master.ask!(:listener) 64 | listener.ask!(:number) 65 | 66 | master.ask!(:terminate!) 67 | 68 | sleep 0.1 69 | -------------------------------------------------------------------------------- /docs-source/cancellation.init.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent-edge' 2 | 3 | def do_stuff(*args) 4 | sleep 0.01 5 | :stuff 6 | end 7 | -------------------------------------------------------------------------------- /docs-source/channel.init.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent-edge' 2 | 3 | def do_stuff(*args) 4 | sleep 0.01 5 | :stuff 6 | end 7 | -------------------------------------------------------------------------------- /docs-source/erlang_actor.init.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent-edge' 2 | 3 | include Concurrent::ErlangActor::EnvironmentConstants 4 | 5 | def do_stuff(*args) 6 | sleep 0.01 7 | :stuff 8 | end 9 | 10 | -------------------------------------------------------------------------------- /docs-source/future.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent' 2 | require 'csv' 3 | require 'open-uri' 4 | 5 | class Ticker 6 | def get_year_end_closing(symbol, year, api_key) 7 | uri = "https://www.alphavantage.co/query?function=TIME_SERIES_MONTHLY&symbol=#{symbol}&apikey=#{api_key}&datatype=csv" 8 | data = [] 9 | csv = URI.parse(uri).read 10 | if csv.include?('call frequency') 11 | return :rate_limit_exceeded 12 | end 13 | CSV.parse(csv, headers: true) do |row| 14 | data << row['close'].to_f if row['timestamp'].include?(year.to_s) 15 | end 16 | year_end = data.first 17 | year_end 18 | rescue => e 19 | p e 20 | end 21 | end 22 | 23 | api_key = ENV['ALPHAVANTAGE_KEY'] 24 | abort(error_message) unless api_key 25 | 26 | # Future 27 | price = Concurrent::Future.execute{ Ticker.new.get_year_end_closing('TWTR', 2013, api_key) } 28 | p price.state #=> :pending 29 | p price.pending? #=> true 30 | p price.value(0) #=> nil (does not block) 31 | 32 | sleep(1) # do other stuff 33 | 34 | p price.value #=> 63.65 (after blocking if necessary) 35 | p price.state #=> :fulfilled 36 | p price.fulfilled? 
#=> true 37 | p price.value #=> 63.65 38 | -------------------------------------------------------------------------------- /docs-source/images/tvar/implementation-absolute.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ruby-concurrency/concurrent-ruby/c8f0bae98f1233dde681441b85dbc29a869267bd/docs-source/images/tvar/implementation-absolute.png -------------------------------------------------------------------------------- /docs-source/images/tvar/implementation-scalability.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ruby-concurrency/concurrent-ruby/c8f0bae98f1233dde681441b85dbc29a869267bd/docs-source/images/tvar/implementation-scalability.png -------------------------------------------------------------------------------- /docs-source/images/tvar/implementation-write-proportion-scalability.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ruby-concurrency/concurrent-ruby/c8f0bae98f1233dde681441b85dbc29a869267bd/docs-source/images/tvar/implementation-write-proportion-scalability.png -------------------------------------------------------------------------------- /docs-source/images/tvar/ruby-absolute.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ruby-concurrency/concurrent-ruby/c8f0bae98f1233dde681441b85dbc29a869267bd/docs-source/images/tvar/ruby-absolute.png -------------------------------------------------------------------------------- /docs-source/images/tvar/ruby-scalability.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ruby-concurrency/concurrent-ruby/c8f0bae98f1233dde681441b85dbc29a869267bd/docs-source/images/tvar/ruby-scalability.png -------------------------------------------------------------------------------- /docs-source/logo/concurrent-ruby-logo-200x200.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ruby-concurrency/concurrent-ruby/c8f0bae98f1233dde681441b85dbc29a869267bd/docs-source/logo/concurrent-ruby-logo-200x200.png -------------------------------------------------------------------------------- /docs-source/logo/concurrent-ruby-logo-300x300.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ruby-concurrency/concurrent-ruby/c8f0bae98f1233dde681441b85dbc29a869267bd/docs-source/logo/concurrent-ruby-logo-300x300.png -------------------------------------------------------------------------------- /docs-source/logo/concurrent-ruby-logo-400x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ruby-concurrency/concurrent-ruby/c8f0bae98f1233dde681441b85dbc29a869267bd/docs-source/logo/concurrent-ruby-logo-400x400.png -------------------------------------------------------------------------------- /docs-source/logo/concurrent-ruby-logo-930x930.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ruby-concurrency/concurrent-ruby/c8f0bae98f1233dde681441b85dbc29a869267bd/docs-source/logo/concurrent-ruby-logo-930x930.png -------------------------------------------------------------------------------- /docs-source/medium-example.init.rb: 
-------------------------------------------------------------------------------- 1 | $captured_out = [] 2 | 3 | def $captured_out.write(str) 4 | push str 5 | end 6 | 7 | def $captured_out.close 8 | end 9 | 10 | def get_captured_output 11 | size = $captured_out.size 12 | $captured_out.shift(size).join 13 | end 14 | -------------------------------------------------------------------------------- /docs-source/promises.init.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent-edge' 2 | 3 | def do_stuff(*args) 4 | sleep 0.01 5 | :stuff 6 | end 7 | -------------------------------------------------------------------------------- /docs-source/signpost.md: -------------------------------------------------------------------------------- 1 | # ConcurrentRuby API documentation 2 | 3 | Pick a `concurrent-ruby` version: 4 | 5 | * [master](./master/index.html) 6 | * [1.3.5 with edge 0.7.2](./1.3.5/index.html) 7 | * [1.1.10 with edge 0.6.0](./1.1.10/index.html) 8 | * [1.1.9 with edge 0.6.0](./1.1.9/index.html) 9 | * [1.1.8 with edge 0.6.0](./1.1.8/index.html) 10 | * [1.1.7 with edge 0.6.0](./1.1.7/index.html) 11 | * [1.1.6 with edge 0.6.0](./1.1.6/index.html) 12 | * [1.1.5 with edge 0.5.0](./1.1.5/index.html) 13 | -------------------------------------------------------------------------------- /docs-source/synchronization-notes.md: -------------------------------------------------------------------------------- 1 | # Concurrent Ruby Notes 2 | 3 | ## Locks 4 | 5 | Concurrent Ruby also has an internal extension of `Object` called 6 | `LockableObject`, which provides same synchronization primitives as Java's 7 | Object: `synchronize(&block)`, `wait(timeout = nil)`, 8 | `wait_until(timeout = nil, &condition)`, `signal`, `broadcast`. This class is 9 | intended for internal use in `concurrent-ruby` only and it does not support 10 | subclassing (since it cannot protect its lock from its children, for more 11 | details see [this article](http://wiki.apidesign.org/wiki/Java_Monitor)). It has 12 | minimal interface to be able to use directly locking available on given 13 | platforms. 14 | 15 | For non-internal use there is `Lock` and `Condition` implementation in 16 | `Synchronization` namespace, a condition can be obtained with `new_condition` 17 | method on `Lock`. So far their implementation is naive and requires more work. 18 | API is not expected to change. 19 | 20 | ## Method names conventions 21 | 22 | Methods starting with `ns_` are marking methods that are not using 23 | synchronization by themselves, they have to be used inside synchronize block. 24 | They are usually used in pairs to separate the synchronization from behavior and 25 | to allow to call methods in the same object without double locking. 26 | 27 | ``` ruby 28 | class Node 29 | # ... 30 | def left 31 | synchronize { ns_left } 32 | end 33 | 34 | def right 35 | synchronize { ns_right } 36 | end 37 | 38 | def to_a 39 | # avoids double locking 40 | synchronize { [ns_left, ns_right] } 41 | end 42 | 43 | private 44 | 45 | def ns_left 46 | @left 47 | end 48 | 49 | def ns_right 50 | @right 51 | end 52 | # ... 53 | end 54 | ``` 55 | ## Piggybacking 56 | 57 | Any write executed before volatile write based on program-order is visible to 58 | the volatile read as well, which allows 59 | [piggybacking](http://stackoverflow.com/questions/8769570/volatile-piggyback-is-this-enough-for-visiblity). 
60 | Because it creates synchronizes-with (JMM term) order between volatile write 61 | and read, which participates in creating happens-before order. 62 | 63 | This trick is used in some of the abstractions, to avoid unnecessary 64 | synchronization or volatile declarations. 65 | -------------------------------------------------------------------------------- /docs-source/synchronization.md: -------------------------------------------------------------------------------- 1 | # Synchronization 2 | 3 | [This document](https://docs.google.com/document/d/1pVzU8w_QF44YzUCCab990Q_WZOdhpKolCIHaiXG-sPw/edit?usp=sharing) 4 | is moved to Google documents. It will be moved here once final and stabilized. 5 | 6 | -------------------------------------------------------------------------------- /docs-source/throttle.init.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent-edge' 2 | 3 | def do_stuff(*args) 4 | sleep 0.01 5 | :stuff 6 | end 7 | -------------------------------------------------------------------------------- /docs-source/top-stock-scala/.gitignore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ruby-concurrency/concurrent-ruby/c8f0bae98f1233dde681441b85dbc29a869267bd/docs-source/top-stock-scala/.gitignore -------------------------------------------------------------------------------- /docs-source/top-stock-scala/top-stock.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent' 2 | require 'csv' 3 | require 'open-uri' 4 | 5 | def get_year_end_closing(symbol, year, api_key) 6 | uri = "https://www.alphavantage.co/query?function=TIME_SERIES_MONTHLY&symbol=#{symbol}&apikey=#{api_key}&datatype=csv" 7 | data = [] 8 | csv = URI.parse(uri).read 9 | if csv.include?('call frequency') 10 | return :rate_limit_exceeded 11 | end 12 | CSV.parse(csv, headers: true) do |row| 13 | data << row['close'].to_f if row['timestamp'].include?(year.to_s) 14 | end 15 | price = data.max 16 | [symbol, price] 17 | end 18 | 19 | def get_top_stock(symbols, year, timeout = 10) 20 | api_key = ENV['ALPHAVANTAGE_KEY'] 21 | abort(error_message) unless api_key 22 | 23 | stock_prices = symbols.collect{|symbol| Concurrent::dataflow{ get_year_end_closing(symbol, year, api_key) }} 24 | Concurrent::dataflow(*stock_prices) { |*prices| 25 | next :rate_limit_exceeded if prices.include?(:rate_limit_exceeded) 26 | prices.reduce(['', 0.0]){|highest, price| price.last > highest.last ? 
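The piggybacking section of `docs-source/synchronization-notes.md` above describes the visibility trick in prose only. As a minimal sketch of the same idea — assuming nothing beyond the public `Concurrent::AtomicBoolean` API, and not code taken from the repository — it might look like this:

```ruby
require 'concurrent'

payload   = nil
published = Concurrent::AtomicBoolean.new(false)

writer = Thread.new do
  payload = { answer: 42 }  # plain, non-volatile write
  published.make_true       # volatile write: publishes everything written before it
end

reader = Thread.new do
  Thread.pass until published.true?  # volatile read pairs with the write above
  payload                            # visible here via the happens-before order
end

writer.join
p reader.value  # => {:answer=>42}
```

Only the flag uses volatile semantics; the payload write stays plain yet is visible to the reader that observed the flag as true, which is the ordering the note says the internal abstractions rely on to avoid extra synchronization or volatile declarations.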
price : highest} 27 | }.value(timeout) 28 | end 29 | 30 | def error_message 31 | <<~EOF 32 | PLEASE provide a Alpha Vantage api key for the example to work 33 | usage: 34 | ALPHAVANTAGE_KEY=YOUR_API_KEY bundle exec ruby top-stock-scala/top-stock.rb 35 | EOF 36 | end 37 | 38 | symbols = ['AAPL', 'GOOG', 'IBM', 'ORCL', 'MSFT'] 39 | year = 2018 40 | 41 | result = get_top_stock(symbols, year) 42 | 43 | if result == :rate_limit_exceeded 44 | puts "API rate limit exceeded" 45 | else 46 | top_stock, highest_price = result 47 | puts "Top stock of #{year} is #{top_stock} closing at price $#{highest_price}" 48 | end 49 | -------------------------------------------------------------------------------- /examples/a-tour-of-go-channels/buffered-channels.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | $: << File.expand_path('../../../lib', __FILE__) 4 | require 'concurrent-edge' 5 | Channel = Concurrent::Channel 6 | 7 | ## A Tour of Go: Buffered Channels 8 | # https://tour.golang.org/concurrency/3 9 | 10 | ch = Channel.new(capacity: 2) 11 | ch << 1 12 | ch << 2 13 | 14 | puts ~ch 15 | puts ~ch 16 | 17 | __END__ 18 | 1 19 | 2 20 | -------------------------------------------------------------------------------- /examples/a-tour-of-go-channels/channels.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | $: << File.expand_path('../../../lib', __FILE__) 4 | require 'concurrent-edge' 5 | Channel = Concurrent::Channel 6 | 7 | ## A Tour of Go: Channels 8 | # https://tour.golang.org/concurrency/2 9 | 10 | def sum(a, c) 11 | sum = a.reduce(0, &:+) 12 | c << sum # `<<` is an alias for `put` or `send` 13 | end 14 | 15 | a = [7, 2, 8, -9, 4, 0] 16 | l = a.length / 2 17 | c = Channel.new 18 | 19 | Channel.go { sum(a[-l, l], c) } 20 | Channel.go { sum(a[0, l], c) } 21 | x, y = ~c, ~c # `~` is an alias for `take` or `receive` 22 | 23 | puts [x, y, x+y].join(' ') 24 | 25 | __END__ 26 | -5 17 12 27 | -------------------------------------------------------------------------------- /examples/a-tour-of-go-channels/default-selection.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | $: << File.expand_path('../../../lib', __FILE__) 4 | require 'concurrent-edge' 5 | Channel = Concurrent::Channel 6 | 7 | ## A Tour of Go: Default Selection 8 | # https://tour.golang.org/concurrency/6 9 | 10 | tick = Channel.tick(0.1) 11 | boom = Channel.after(0.5) 12 | 13 | loop do 14 | Channel.select do |s| 15 | s.take(tick) { |t| print "tick.\n" if t } 16 | s.take(boom) do 17 | print "BOOM!\n" 18 | exit 19 | end 20 | s.default do 21 | print " .\n" 22 | sleep(0.05) 23 | end 24 | end 25 | end 26 | 27 | __END__ 28 | . 29 | . 30 | tick. 31 | . 32 | . 33 | tick. 34 | . 35 | . 36 | tick. 37 | . 38 | . 39 | tick. 40 | . 41 | . 42 | tick. 43 | BOOM! 
44 | -------------------------------------------------------------------------------- /examples/a-tour-of-go-channels/equivalent-binary-trees.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | $: << File.expand_path('../../../lib', __FILE__) 4 | require 'concurrent-edge' 5 | Channel = Concurrent::Channel 6 | 7 | ## A Tour of Go: Equivalent Binary Trees 8 | # https://tour.golang.org/concurrency/8 9 | 10 | Tree = Struct.new(:value, :left, :right) 11 | 12 | def new_tree(n, size = 10) 13 | values = [*1..size].collect{|i| i * n }.sample(size) 14 | root = Tree.new(values.shift) 15 | 16 | inserter = ->(current, new) do 17 | if new.value <= current.value 18 | if current.left.nil? 19 | current.left = new 20 | else 21 | inserter.call(current.left, new) 22 | end 23 | else 24 | if current.right.nil? 25 | current.right = new 26 | else 27 | inserter.call(current.right, new) 28 | end 29 | end 30 | end 31 | 32 | while value = values.shift do 33 | inserter.call(root, Tree.new(value)) 34 | end 35 | 36 | root 37 | end 38 | 39 | def walk(tree, channel) 40 | _walk = ->(t, ch) do 41 | return unless t 42 | _walk.call(t.left, ch) 43 | ch << t.value 44 | _walk.call(t.right, ch) 45 | end 46 | 47 | _walk.call(tree, channel) 48 | channel.close 49 | end 50 | 51 | def same(t1, t2) 52 | ch1 = Channel.new 53 | ch2 = Channel.new 54 | 55 | Channel.go { walk(t1, ch1) } 56 | Channel.go { walk(t2, ch2) } 57 | 58 | ch1.each do |v| 59 | return false unless v == ~ch2 60 | end 61 | 62 | return true 63 | end 64 | 65 | puts same(new_tree(1), new_tree(1)) 66 | puts same(new_tree(1), new_tree(2)) 67 | 68 | __END__ 69 | true 70 | false 71 | -------------------------------------------------------------------------------- /examples/a-tour-of-go-channels/range-and-close.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | $: << File.expand_path('../../../lib', __FILE__) 4 | require 'concurrent-edge' 5 | Channel = Concurrent::Channel 6 | 7 | ## A Tour of Go: Range and Close 8 | # https://tour.golang.org/concurrency/4 9 | 10 | def fibonacci(n, c) 11 | x, y = 0, 1 12 | (1..n).each do 13 | c << x 14 | x, y = y, x+y 15 | end 16 | c.close 17 | end 18 | 19 | c = Channel.new(capacity: 10) 20 | Channel.go { fibonacci(c.capacity, c) } 21 | c.each { |i| puts i } 22 | 23 | __END__ 24 | 0 25 | 1 26 | 1 27 | 2 28 | 3 29 | 5 30 | 8 31 | 13 32 | 21 33 | 34 34 | -------------------------------------------------------------------------------- /examples/a-tour-of-go-channels/select.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | $: << File.expand_path('../../../lib', __FILE__) 4 | require 'concurrent-edge' 5 | Channel = Concurrent::Channel 6 | 7 | ## A Tour of Go: Select 8 | # https://tour.golang.org/concurrency/5 9 | 10 | def fibonacci(c, quit) 11 | x, y = 0, 1 12 | loop do 13 | Channel.select do |s| 14 | s.case(c, :<<, x) { x, y = y, x+y; x } # alias for `s.put` 15 | s.case(quit, :~) do # alias for `s.take` 16 | puts 'quit' 17 | return 18 | end 19 | end 20 | end 21 | end 22 | 23 | c = Channel.new 24 | quit = Channel.new 25 | 26 | Channel.go do 27 | 10.times { puts ~c } 28 | quit << 0 29 | end 30 | 31 | fibonacci(c, quit) 32 | 33 | __END__ 34 | 0 35 | 1 36 | 1 37 | 2 38 | 3 39 | 5 40 | 8 41 | 13 42 | 21 43 | 34 44 | quit 45 | -------------------------------------------------------------------------------- /examples/atomic_example.rb: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | #$: << File.expand_path('../../lib', __FILE__) 4 | 5 | require 'concurrent/atomics' 6 | 7 | my_atomic = Concurrent::AtomicReference.new(0) 8 | my_atomic.update {|v| v + 1} 9 | puts "new value: #{my_atomic.value}" 10 | 11 | begin 12 | my_atomic.try_update {|v| v + 1} 13 | rescue Concurrent::Atomic::ConcurrentUpdateError => cue 14 | # deal with it (retry, propagate, etc) 15 | end 16 | puts "new value: #{my_atomic.value}" 17 | -------------------------------------------------------------------------------- /examples/benchmark_atomic_boolean.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | #$: << File.expand_path('../../lib', __FILE__) 4 | 5 | require 'concurrent/atomics' 6 | require 'benchmark' 7 | require 'rbconfig' 8 | 9 | THREADS = 1 10 | TESTS = 10_000_000 11 | 12 | def atomic_test(clazz, opts = {}) 13 | threads = opts.fetch(:threads, 5) 14 | tests = opts.fetch(:tests, 100) 15 | 16 | atomic = clazz.new 17 | latch = Concurrent::CountDownLatch.new(threads) 18 | 19 | print "Testing with #{clazz}...\n" 20 | Benchmark.bmbm do |bm| 21 | bm.report do 22 | threads.times do |i| 23 | Thread.new do 24 | tests.times{ atomic.value = true } 25 | latch.count_down 26 | end 27 | end 28 | latch.wait 29 | end 30 | end 31 | end 32 | 33 | puts "Testing with #{RbConfig::CONFIG['ruby_install_name']} #{RUBY_VERSION}" 34 | 35 | atomic_test(Concurrent::MutexAtomicBoolean, threads: THREADS, tests: TESTS) 36 | 37 | if defined? Concurrent::CAtomicBoolean 38 | atomic_test(Concurrent::CAtomicBoolean, threads: THREADS, tests: TESTS) 39 | elsif RUBY_PLATFORM == 'java' 40 | atomic_test(Concurrent::JavaAtomicBoolean, threads: THREADS, tests: TESTS) 41 | end 42 | -------------------------------------------------------------------------------- /examples/benchmark_atomic_fixnum.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | #$: << File.expand_path('../../lib', __FILE__) 4 | 5 | require 'concurrent/atomics' 6 | require 'benchmark' 7 | require 'rbconfig' 8 | 9 | THREADS = 1 10 | TESTS = 10_000_000 11 | 12 | def atomic_test(clazz, opts = {}) 13 | threads = opts.fetch(:threads, 5) 14 | tests = opts.fetch(:tests, 100) 15 | 16 | num = clazz.new 17 | latch = Concurrent::CountDownLatch.new(threads) 18 | 19 | print "Testing with #{clazz}...\n" 20 | Benchmark.bmbm do |bm| 21 | bm.report do 22 | threads.times do |i| 23 | Thread.new do 24 | tests.times{ num.up } 25 | latch.count_down 26 | end 27 | end 28 | latch.wait 29 | end 30 | end 31 | end 32 | 33 | puts "Testing with #{RbConfig::CONFIG['ruby_install_name']} #{RUBY_VERSION}" 34 | 35 | atomic_test(Concurrent::MutexAtomicFixnum, threads: THREADS, tests: TESTS) 36 | 37 | if defined? 
Concurrent::CAtomicFixnum 38 | atomic_test(Concurrent::CAtomicFixnum, threads: THREADS, tests: TESTS) 39 | elsif RUBY_PLATFORM == 'java' 40 | atomic_test(Concurrent::JavaAtomicFixnum, threads: THREADS, tests: TESTS) 41 | end 42 | -------------------------------------------------------------------------------- /examples/benchmark_map.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | require 'benchmark/ips' 4 | require 'concurrent/map' 5 | 6 | hash = {} 7 | map = Concurrent::Map.new 8 | 9 | ENTRIES = 10_000 10 | 11 | ENTRIES.times do |i| 12 | hash[i] = i 13 | map[i] = i 14 | end 15 | 16 | TESTS = 1_000 17 | key = 2732 # srand(0) and rand(10_000) 18 | 19 | Benchmark.ips do |results| 20 | results.report('Hash#[]') do 21 | hash[key] 22 | end 23 | 24 | results.report('Map#[]') do 25 | map[key] 26 | end 27 | 28 | results.report('Hash#each_pair') do 29 | hash.each_pair { |k,v| v } 30 | end 31 | 32 | results.report('Map#each_pair') do 33 | map.each_pair { |k,v| v } 34 | end 35 | end 36 | -------------------------------------------------------------------------------- /examples/format.rb: -------------------------------------------------------------------------------- 1 | require 'rubygems' 2 | require 'bundler/setup' 3 | require 'pry' 4 | require 'pp' 5 | 6 | input_paths = if ARGV.empty? 7 | Dir.glob("#{File.dirname(__FILE__)}/*.in.rb") 8 | else 9 | ARGV 10 | end.map { |p| File.expand_path p } 11 | 12 | input_paths.each_with_index do |input_path, i| 13 | 14 | pid = fork do 15 | require_relative 'init' 16 | 17 | begin 18 | output_path = input_path.gsub /\.in\.rb$/, '.out.rb' 19 | input = File.readlines(input_path) 20 | 21 | chunks = [] 22 | line = '' 23 | 24 | while !input.empty? 25 | line += input.shift 26 | if Pry::Code.complete_expression? line 27 | chunks << line 28 | line = '' 29 | end 30 | end 31 | 32 | raise unless line.empty? 33 | 34 | chunks.map! { |chunk| [chunk, [chunk.split($/).size, 1].max] } 35 | environment = Module.new.send :binding 36 | evaluate = ->(code, line) do 37 | eval(code, environment, input_path, line) 38 | end 39 | 40 | indent = 50 41 | 42 | line_count = 1 43 | output = '' 44 | chunks.each do |chunk, lines| 45 | result = evaluate.(chunk, line_count) 46 | unless chunk.strip.empty? 
|| chunk =~ /\A *#/ 47 | pre_lines = chunk.lines.to_a 48 | last_line = pre_lines.pop 49 | output << pre_lines.join 50 | 51 | if last_line =~ /\#$/ 52 | output << last_line.gsub(/\#$/, '') 53 | else 54 | if last_line.size < indent && result.inspect.size < indent 55 | output << "%-#{indent}s %s" % [last_line.chomp, "# => #{result.inspect}\n"] 56 | else 57 | output << last_line << " # => #{result.inspect}\n" 58 | end 59 | end 60 | else 61 | output << chunk 62 | end 63 | line_count += lines 64 | end 65 | 66 | puts "#{input_path}\n -> #{output_path}" 67 | #puts output 68 | File.write(output_path, output) 69 | rescue => ex 70 | puts "#{ex} (#{ex.class})\n#{ex.backtrace * "\n"}" 71 | end 72 | end 73 | 74 | Process.wait pid 75 | end 76 | -------------------------------------------------------------------------------- /examples/go-by-example-channels/channel-buffering.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | $: << File.expand_path('../../../lib', __FILE__) 4 | require 'concurrent-edge' 5 | Channel = Concurrent::Channel 6 | 7 | ## Go by Example: Channel Buffering 8 | # https://gobyexample.com/channel-buffering 9 | 10 | messages = Channel.new(capacity: 2) # buffered 11 | 12 | messages.put 'buffered' 13 | messages.put 'channel' 14 | 15 | puts messages.take 16 | puts messages.take 17 | 18 | __END__ 19 | buffered 20 | channel 21 | -------------------------------------------------------------------------------- /examples/go-by-example-channels/channel-directions.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | $: << File.expand_path('../../../lib', __FILE__) 4 | require 'concurrent-edge' 5 | Channel = Concurrent::Channel 6 | 7 | ## Go by Example: Channel Direction 8 | # https://gobyexample.com/channel-directions 9 | 10 | # we can't force a channel to go only one direction w/i a function 11 | # but we can replicate the actual functionality from the example 12 | 13 | def ping(pings, msg) 14 | pings << msg 15 | end 16 | 17 | def pong(pings, pongs) 18 | msg = ~pings 19 | pongs << msg 20 | end 21 | 22 | pings = Channel.new(capacity: 1) # buffered 23 | pongs = Channel.new(capacity: 1) # buffered 24 | 25 | ping(pings, 'passed message') 26 | pong(pings, pongs) 27 | 28 | puts ~pongs 29 | 30 | __END__ 31 | passed message 32 | -------------------------------------------------------------------------------- /examples/go-by-example-channels/channel-synchronization.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | $: << File.expand_path('../../../lib', __FILE__) 4 | require 'concurrent-edge' 5 | Channel = Concurrent::Channel 6 | 7 | ## Go by Example: Channel Synchronizatio 8 | # https://gobyexample.com/channel-synchronization 9 | 10 | def worker(done_channel) 11 | print "working...\n" 12 | sleep(1) 13 | print "done\n" 14 | 15 | done_channel << true # alias for `#put` 16 | end 17 | 18 | done = Channel.new(capacity: 1) # buffered 19 | Channel.go{ worker(done) } 20 | 21 | ~done # alias for `#take` 22 | 23 | __END__ 24 | working... 
25 | done 26 | -------------------------------------------------------------------------------- /examples/go-by-example-channels/channels.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | $: << File.expand_path('../../../lib', __FILE__) 4 | require 'concurrent-edge' 5 | Channel = Concurrent::Channel 6 | 7 | ## Go by Example: Unbuffered Channel 8 | # https://gobyexample.com/channels 9 | 10 | messages = Channel.new # unbuffered 11 | 12 | Channel.go do 13 | messages.put 'ping' 14 | end 15 | 16 | msg = messages.take 17 | puts msg 18 | 19 | __END__ 20 | ping 21 | -------------------------------------------------------------------------------- /examples/go-by-example-channels/closing-channels.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | $: << File.expand_path('../../../lib', __FILE__) 4 | require 'concurrent-edge' 5 | Channel = Concurrent::Channel 6 | 7 | ## Go by Example: Closing Channels 8 | # https://gobyexample.com/closing-channels 9 | 10 | validator = ->(v){ v.is_a? Numeric } 11 | jobs = Channel.new(buffer: :buffered, capacity: 5, 12 | validator: validator) 13 | done = Channel.new(buffer: :unbuffered) 14 | 15 | Channel.go_loop do 16 | j, more = jobs.next 17 | if more 18 | print "received job #{j}\n" 19 | true # loop again 20 | else 21 | print "received all jobs\n" 22 | done << true 23 | false # exit the loop 24 | end 25 | end 26 | 27 | (1..3).each do |i| 28 | jobs << i 29 | print "sent job #{i}\n" 30 | Thread.pass # give the worker a chance to run 31 | end 32 | 33 | jobs.close 34 | print "sent all jobs\n" 35 | ~done 36 | 37 | __END__ 38 | sent job 1 39 | received job 1 40 | sent job 2 41 | received job 2 42 | sent job 3 43 | received job 3 44 | sent all jobs 45 | received all jobs 46 | -------------------------------------------------------------------------------- /examples/go-by-example-channels/non-blocking-channel-operations.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | $: << File.expand_path('../../../lib', __FILE__) 4 | require 'concurrent-edge' 5 | Channel = Concurrent::Channel 6 | 7 | ## Go by Example: Non-Blocking Channel Operations 8 | # https://gobyexample.com/non-blocking-channel-operations 9 | 10 | messages = Channel.new # unbuffered 11 | signals = Channel.new # unbuffered 12 | 13 | Channel.select do |s| 14 | s.take(messages) { |msg| print "received message #{msg}\n" } 15 | s.default { print "no message received\n" } 16 | end 17 | 18 | message = 'hi' 19 | Channel.select do |s| 20 | s.put(messages, message) { |msg| print "sent message #{msg}\n" } 21 | s.default { print "no message sent\n" } 22 | end 23 | 24 | Channel.select do |s| 25 | s.case(messages, :~) { |msg| print "received message #{msg}\n" } # alias for `s.take` 26 | s.case(signals, :~) { |sig| print "received signal #{sig}\n" } # alias for `s.take` 27 | s.default { print "no activity\n" } 28 | end 29 | 30 | __END__ 31 | no message received 32 | no message sent 33 | no activity 34 | -------------------------------------------------------------------------------- /examples/go-by-example-channels/range-over-channels.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | $: << File.expand_path('../../../lib', __FILE__) 4 | require 'concurrent-edge' 5 | Channel = Concurrent::Channel 6 | 7 | ## Go by Example: Range over Channels 8 | # 
https://gobyexample.com/range-over-channels 9 | 10 | queue = Channel.new(capacity: 2) # buffered 11 | queue << 'one' 12 | queue << 'two' 13 | queue.close 14 | 15 | queue.each do |elem| 16 | print "#{elem}\n" 17 | end 18 | 19 | __END__ 20 | one 21 | two 22 | -------------------------------------------------------------------------------- /examples/go-by-example-channels/rate-limiting.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | $: << File.expand_path('../../../lib', __FILE__) 4 | require 'concurrent-edge' 5 | require 'time' 6 | 7 | Channel = Concurrent::Channel 8 | 9 | ## Go by Example: Rate Limiting 10 | # https://gobyexample.com/rate-limiting 11 | 12 | requests = Channel.new(buffer: :buffered, capacity: 5) 13 | (1..5).each do |i| 14 | requests << i 15 | end 16 | requests.close 17 | 18 | limiter = Channel.ticker(0.2) 19 | requests.each do |req| 20 | print "request #{req} #{Channel::Tick.new}\n" if ~limiter 21 | end 22 | print "\n" 23 | 24 | bursty_limiter = Channel.new(buffer: :buffered, capacity: 3) 25 | (1..3).each do 26 | bursty_limiter << Channel::Tick.new 27 | end 28 | 29 | ticker = Channel.ticker(0.2) 30 | Channel.go do 31 | ticker.each do |t| 32 | bursty_limiter << t 33 | end 34 | end 35 | 36 | bursty_requests = Channel.new(buffer: :buffered, capacity: 5) 37 | (1..5).each do |i| 38 | bursty_requests << i 39 | end 40 | bursty_requests.close 41 | 42 | bursty_requests.each do |req| 43 | ~bursty_limiter 44 | print "request #{req} #{Channel::Tick.new}\n" 45 | end 46 | 47 | limiter.close 48 | ticker.close 49 | 50 | __END__ 51 | request 1 2012-10-19 00:38:18.687438 +0000 UTC 52 | request 2 2012-10-19 00:38:18.887471 +0000 UTC 53 | request 3 2012-10-19 00:38:19.087238 +0000 UTC 54 | request 4 2012-10-19 00:38:19.287338 +0000 UTC 55 | request 5 2012-10-19 00:38:19.487331 +0000 UTC 56 | 57 | request 1 2012-10-19 00:38:20.487578 +0000 UTC 58 | request 2 2012-10-19 00:38:20.487645 +0000 UTC 59 | request 3 2012-10-19 00:38:20.487676 +0000 UTC 60 | request 4 2012-10-19 00:38:20.687483 +0000 UTC 61 | request 5 2012-10-19 00:38:20.887542 +0000 UTC 62 | -------------------------------------------------------------------------------- /examples/go-by-example-channels/select.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | $: << File.expand_path('../../../lib', __FILE__) 4 | require 'concurrent-edge' 5 | Channel = Concurrent::Channel 6 | 7 | ## Go by Example: Select 8 | # https://gobyexample.com/select 9 | 10 | c1 = Channel.new # unbuffered 11 | c2 = Channel.new # unbuffered 12 | 13 | Channel.go do 14 | sleep(1) 15 | c1 << 'one' 16 | end 17 | 18 | Channel.go do 19 | sleep(2) 20 | c1 << 'two' 21 | end 22 | 23 | 2.times do 24 | Channel.select do |s| 25 | s.take(c1) { |msg| print "received #{msg}\n" } 26 | s.take(c2) { |msg| print "received #{msg}\n" } 27 | end 28 | end 29 | 30 | __END__ 31 | received one 32 | received two 33 | -------------------------------------------------------------------------------- /examples/go-by-example-channels/ticker.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | $: << File.expand_path('../../../lib', __FILE__) 4 | require 'concurrent-edge' 5 | Channel = Concurrent::Channel 6 | 7 | ## Go by Example: Tickers 8 | # https://gobyexample.com/tickers 9 | 10 | ticker = Channel.ticker(0.5) 11 | Channel.go do 12 | ticker.each do |tick| 13 | print "Tick at #{tick}\n" if tick 14 | 
end 15 | end 16 | 17 | sleep(1.6) 18 | ticker.stop 19 | print "Ticker stopped\n" 20 | 21 | __END__ 22 | Tick at 2012-09-23 11:29:56.487625 -0700 PDT 23 | Tick at 2012-09-23 11:29:56.988063 -0700 PDT 24 | Tick at 2012-09-23 11:29:57.488076 -0700 PDT 25 | Ticker stopped 26 | -------------------------------------------------------------------------------- /examples/go-by-example-channels/timeouts.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | $: << File.expand_path('../../../lib', __FILE__) 4 | require 'concurrent-edge' 5 | Channel = Concurrent::Channel 6 | 7 | ## Go by Example: Timeouts 8 | # https://gobyexample.com/timeouts 9 | 10 | c1 = Channel.new(capacity: 1) # buffered 11 | Channel.go do 12 | sleep(2) 13 | c1 << 'result 1' 14 | end 15 | 16 | Channel.select do |s| 17 | s.take(c1) { |msg| print "#{msg}\n" } 18 | s.after(1) { print "timeout 1\n" } 19 | end 20 | 21 | c2 = Channel.new(capacity: 1) # buffered 22 | Channel.go do 23 | sleep(2) 24 | c2 << 'result 2' 25 | end 26 | 27 | Channel.select do |s| 28 | s.take(c2) { |msg| print "#{msg}\n" } 29 | s.after(3) { print "timeout 2\n" } 30 | end 31 | 32 | __END__ 33 | timeout 1 34 | result 2 35 | -------------------------------------------------------------------------------- /examples/go-by-example-channels/timers.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | $: << File.expand_path('../../../lib', __FILE__) 4 | require 'concurrent-edge' 5 | Channel = Concurrent::Channel 6 | 7 | ## Go by Example: Timers 8 | # https://gobyexample.com/timers 9 | 10 | timer1 = Channel.timer(2) 11 | 12 | puts 'Timer 1 expired' if ~timer1 13 | 14 | timer2 = Channel.timer(1) 15 | Channel.go do 16 | print "Timer 2 expired\n" if ~timer2 17 | end 18 | 19 | stop2 = timer2.stop 20 | print "Timer 2 stopped\n" if stop2 21 | 22 | __END__ 23 | Timer 1 expired 24 | Timer 2 stopped 25 | -------------------------------------------------------------------------------- /examples/go-by-example-channels/worker-pools.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | $: << File.expand_path('../../../lib', __FILE__) 4 | require 'concurrent-edge' 5 | Channel = Concurrent::Channel 6 | 7 | ## Go by Example: Go by Example: Worker Pools 8 | # https://gobyexample.com/worker-pools 9 | 10 | def worker(id, jobs, results) 11 | jobs.each do |j| 12 | print "worker #{id} processing job #{j}\n" 13 | sleep(1) 14 | results << j * 2 15 | end 16 | end 17 | 18 | jobs = Channel.new(buffer: :buffered, capacity: 100) 19 | results = Channel.new(buffer: :buffered, capacity: 100) 20 | 21 | (1..3).each do |w| 22 | Channel.go { worker(w, jobs, results) } 23 | end 24 | 25 | (1..9).each do |j| 26 | jobs << j 27 | end 28 | jobs.close 29 | 30 | (1..9).each do 31 | ~results 32 | end 33 | 34 | __END__ 35 | worker 1 processing job 1 36 | worker 2 processing job 2 37 | worker 3 processing job 3 38 | worker 1 processing job 4 39 | worker 2 processing job 5 40 | worker 3 processing job 6 41 | worker 1 processing job 7 42 | worker 2 processing job 8 43 | worker 3 processing job 9 44 | -------------------------------------------------------------------------------- /examples/graph_atomic_bench.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | #$: << File.expand_path('../../lib', __FILE__) 4 | 5 | require 'optparse' 6 | 7 | conf = { 8 | :vary => 
"threads", 9 | :lock => "atomic" 10 | } 11 | 12 | OptionParser.new do |opts| 13 | opts.on("-l", "--lock atomic|mutex") do |l| 14 | conf[:lock] = l 15 | end 16 | opts.on("-v", "--vary threads|speed") do |v| 17 | conf[:vary] = v 18 | end 19 | opts.on("-h", "--help"){ puts opts; exit } 20 | end.parse!(ARGV) 21 | 22 | result = File.open("results_#{conf[:lock]}_#{conf[:vary]}.csv", "w") 23 | 24 | 25 | if conf[:vary] == "threads" 26 | # Varies the number of concurrent threads that update the value. 27 | # 28 | # There is a total count of 1mio updates that is distributed 29 | # between the number of threads. 30 | # 31 | # A doubled number of threads is used so that even adds 1 and odd subtracts 1. 32 | # This avoids creating instances for Bignum since the number should 33 | # stay in the Fixnum range. 34 | # 35 | (1..100).each do |i| 36 | i = i * 2 37 | 38 | ret = [] 39 | 10.times do 40 | ret << `ruby #{File.dirname(__FILE__)}/benchmark_atomic_1.rb -l #{conf[:lock]} -t #{i}`.to_f 41 | end 42 | 43 | line = ([i] + ret).join(', ') 44 | 45 | puts line 46 | result.puts line 47 | end 48 | elsif conf[:vary] == "speed" 49 | # Varies the execution time of the update block 50 | # by using long calculation (MD5) 51 | # 52 | # NOTE: Thread.pass and sleep() are not usable by the atomic 53 | # lock. It needs to run the whole block without hitting 54 | # another atomic update otherwise it has to retry 55 | # 56 | # The expected result is that the atomic lock's performance 57 | # will hit a certain threshold where it will be worse than mutexes. 58 | # 59 | (1..30).each do |i| 60 | 61 | ret = [] 62 | 10.times do 63 | ret << `ruby #{File.dirname(__FILE__)}/benchmark_atomic_1.rb -l #{conf[:lock]} -s #{i}`.to_f 64 | end 65 | 66 | line = ([i] + ret).join(', ') 67 | 68 | puts line 69 | result.puts line 70 | end 71 | end 72 | -------------------------------------------------------------------------------- /examples/init.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent-edge' 2 | 3 | def do_stuff(*args) 4 | :stuff 5 | end 6 | 7 | Concurrent.use_simple_logger :DEBUG 8 | -------------------------------------------------------------------------------- /examples/stress_ruby_thread_pool.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | $: << File.expand_path('../../lib', __FILE__) 4 | 5 | require 'benchmark' 6 | require 'concurrent/executors' 7 | 8 | COUNT = 100_000 9 | 10 | executor = Concurrent::CachedThreadPool.new 11 | latch = Concurrent::CountDownLatch.new 12 | 13 | COUNT.times { executor.post{ nil } } 14 | 15 | #COUNT.times do |i| 16 | # executor.post{ nil } 17 | # sleep(0.01) if i % 1000 == 0 18 | #end 19 | 20 | executor.post{ latch.count_down } 21 | latch.wait 22 | 23 | puts "Max length: #{executor.max_length}" if executor.respond_to?(:max_length) 24 | puts "Largest length: #{executor.largest_length}" if executor.respond_to?(:largest_length) 25 | puts "Scheduled task count: #{executor.scheduled_task_count}" if executor.respond_to?(:scheduled_task_count) 26 | puts "Completed task count: #{executor.completed_task_count}" if executor.respond_to?(:completed_task_count) 27 | -------------------------------------------------------------------------------- /examples/thread_local_memory_usage.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | #$: << File.expand_path('../../lib', __FILE__) 4 | 5 | $DEBUG_TLV = true 6 | require 
'concurrent' 7 | require 'concurrent/atomic/thread_local_var' 8 | require 'benchmark' 9 | require 'thread' 10 | 11 | include Concurrent 12 | 13 | # if we hold on to vars, but threads die, space used for TLVs should be recovered 14 | 15 | def test_thread_gc(vars) 16 | threads = 500.times.collect do 17 | Thread.new do 18 | vars.each do |var| 19 | var.value = 1 20 | end 21 | end 22 | end 23 | threads.each(&:join) 24 | end 25 | 26 | puts "BEFORE THREAD GC TEST:" 27 | puts "Ruby heap pages: #{GC.stat[:heap_length]}, Other malloc'd bytes: #{GC.stat[:malloc_increase]}" 28 | 29 | vars = 500.times.collect { ThreadLocalVar.new(0) } 30 | 200.times do 31 | test_thread_gc(vars) 32 | GC.start 33 | end 34 | 35 | puts "AFTER THREAD GC TEST:" 36 | puts "Ruby heap pages: #{GC.stat[:heap_length]}, Other malloc'd bytes: #{GC.stat[:malloc_increase]}" 37 | 38 | # if we hold on to threads, but drop TLVs, space used should be reused by allocated TLVs 39 | 40 | def tlv_gc_test_loop(queue) 41 | while true 42 | var = queue.pop 43 | return if var.nil? 44 | var.value = 1 45 | end 46 | end 47 | 48 | def test_tlv_gc(queues) 49 | 500.times do 50 | var = ThreadLocalVar.new(0) 51 | queues.each { |q| q << var } 52 | end 53 | end 54 | 55 | puts 56 | puts "BEFORE TLV GC TEST:" 57 | puts "Ruby heap pages: #{GC.stat[:heap_length]}, Other malloc'd bytes: #{GC.stat[:malloc_increase]}" 58 | 59 | queues = 500.times.collect { Queue.new } 60 | threads = queues.map do |queue| 61 | Thread.new do 62 | tlv_gc_test_loop(queue) 63 | end 64 | end 65 | 66 | 200.times do 67 | test_tlv_gc(queues) 68 | GC.start 69 | end 70 | queues.each { |q| q << nil } 71 | threads.each(&:join) 72 | 73 | puts "AFTER TLV GC TEST:" 74 | puts "Ruby heap pages: #{GC.stat[:heap_length]}, Other malloc'd bytes: #{GC.stat[:malloc_increase]}" 75 | -------------------------------------------------------------------------------- /examples/thread_local_var_bench.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | #$: << File.expand_path('../../lib', __FILE__) 4 | 5 | require 'concurrent' 6 | require 'concurrent/atomic/thread_local_var' 7 | require 'benchmark' 8 | 9 | include Concurrent 10 | 11 | N_THREADS = 100 12 | N_VARS = 100 13 | 14 | vars = N_VARS.times.collect { ThreadLocalVar.new(0) } 15 | 16 | def test_threadlocal_perf(vars) 17 | threads = N_THREADS.times.collect do 18 | Thread.new do 19 | 10000.times do 20 | index = rand(N_VARS) 21 | var = vars[index] 22 | var.value = var.value + 1 23 | end 24 | end 25 | end 26 | threads.each(&:join) 27 | end 28 | 29 | Benchmark.bmbm do |bm| 30 | bm.report('ThreadLocalVar') { test_threadlocal_perf(vars) } 31 | end 32 | -------------------------------------------------------------------------------- /examples/who.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | require 'net/http' 4 | require 'json' 5 | 6 | # http://www.schneems.com/blogs/2015-09-30-reverse-rubygems/ 7 | 8 | gem_name = "concurrent-ruby" 9 | 10 | def rubygems_get(gem_name: "", endpoint: "") 11 | path = File.join("/api/v1/gems/", gem_name, endpoint).chomp("/") + ".json" 12 | JSON.parse(Net::HTTP.get("rubygems.org", path)) 13 | end 14 | 15 | results = rubygems_get(gem_name: gem_name, endpoint: "reverse_dependencies") 16 | 17 | weighted_results = {} 18 | results.each do |name| 19 | begin 20 | weighted_results[name] = rubygems_get(gem_name: name)["downloads"] 21 | rescue => e 22 | puts "#{name} #{e.message}" 23 | end 24 | end 25 | 26 | 
weighted_results.sort {|(k1, v1), (k2, v2)| v2 <=> v1 }.first(50).each_with_index do |(k, v), i| 27 | puts "#{i}) #{k}: #{v}" 28 | end 29 | -------------------------------------------------------------------------------- /ext/concurrent-ruby-ext/atomic_boolean.c: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | #include "atomic_boolean.h" 4 | #include "atomic_reference.h" 5 | 6 | void atomic_boolean_mark(void *value) { 7 | rb_gc_mark_maybe((VALUE) value); 8 | } 9 | 10 | VALUE atomic_boolean_allocate(VALUE klass) { 11 | return rb_data_object_wrap(klass, (void *) Qfalse, atomic_boolean_mark, NULL); 12 | } 13 | 14 | VALUE method_atomic_boolean_initialize(int argc, VALUE* argv, VALUE self) { 15 | VALUE value = Qfalse; 16 | rb_check_arity(argc, 0, 1); 17 | if (argc == 1) value = TRUTHY(argv[0]); 18 | DATA_PTR(self) = (void *) value; 19 | return(self); 20 | } 21 | 22 | VALUE method_atomic_boolean_value(VALUE self) { 23 | return(ir_get(self)); 24 | } 25 | 26 | VALUE method_atomic_boolean_value_set(VALUE self, VALUE value) { 27 | VALUE new_value = TRUTHY(value); 28 | return(ir_set(self, new_value)); 29 | } 30 | 31 | VALUE method_atomic_boolean_true_question(VALUE self) { 32 | return(method_atomic_boolean_value(self)); 33 | } 34 | 35 | VALUE method_atomic_boolean_false_question(VALUE self) { 36 | VALUE current = method_atomic_boolean_value(self); 37 | return(current == Qfalse ? Qtrue : Qfalse); 38 | } 39 | 40 | VALUE method_atomic_boolean_make_true(VALUE self) { 41 | return(ir_compare_and_set(self, Qfalse, Qtrue)); 42 | } 43 | 44 | VALUE method_atomic_boolean_make_false(VALUE self) { 45 | return(ir_compare_and_set(self, Qtrue, Qfalse)); 46 | } 47 | -------------------------------------------------------------------------------- /ext/concurrent-ruby-ext/atomic_boolean.h: -------------------------------------------------------------------------------- 1 | #ifndef __ATOMIC_BOOLEAN_H__ 2 | #define __ATOMIC_BOOLEAN_H__ 3 | 4 | #define TRUTHY(value)(value == Qfalse || value == Qnil ? 
Qfalse : Qtrue) 5 | 6 | void atomic_boolean_mark(void*); 7 | VALUE atomic_boolean_allocate(VALUE); 8 | VALUE method_atomic_boolean_initialize(int, VALUE*, VALUE); 9 | VALUE method_atomic_boolean_value(VALUE); 10 | VALUE method_atomic_boolean_value_set(VALUE, VALUE); 11 | VALUE method_atomic_boolean_true_question(VALUE); 12 | VALUE method_atomic_boolean_false_question(VALUE); 13 | VALUE method_atomic_boolean_make_true(VALUE); 14 | VALUE method_atomic_boolean_make_false(VALUE); 15 | 16 | #endif 17 | -------------------------------------------------------------------------------- /ext/concurrent-ruby-ext/atomic_fixnum.c: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | #include "atomic_fixnum.h" 4 | #include "atomic_reference.h" 5 | 6 | void atomic_fixnum_mark(void *value) { 7 | rb_gc_mark_maybe((VALUE) value); 8 | } 9 | 10 | VALUE atomic_fixnum_allocate(VALUE klass) { 11 | return rb_data_object_wrap(klass, (void *) Qnil, atomic_fixnum_mark, NULL); 12 | } 13 | 14 | VALUE method_atomic_fixnum_initialize(int argc, VALUE* argv, VALUE self) { 15 | VALUE value = LL2NUM(0); 16 | rb_check_arity(argc, 0, 1); 17 | if (argc == 1) { 18 | Check_Type(argv[0], T_FIXNUM); 19 | value = argv[0]; 20 | } 21 | DATA_PTR(self) = (void *) value; 22 | return(self); 23 | } 24 | 25 | VALUE method_atomic_fixnum_value(VALUE self) { 26 | return (VALUE) DATA_PTR(self); 27 | } 28 | 29 | VALUE method_atomic_fixnum_value_set(VALUE self, VALUE value) { 30 | Check_Type(value, T_FIXNUM); 31 | DATA_PTR(self) = (void *) value; 32 | return(value); 33 | } 34 | 35 | VALUE method_atomic_fixnum_increment(int argc, VALUE* argv, VALUE self) { 36 | long long value = NUM2LL((VALUE) DATA_PTR(self)); 37 | long long delta = 1; 38 | rb_check_arity(argc, 0, 1); 39 | if (argc == 1) { 40 | Check_Type(argv[0], T_FIXNUM); 41 | delta = NUM2LL(argv[0]); 42 | } 43 | return method_atomic_fixnum_value_set(self, LL2NUM(value + delta)); 44 | } 45 | 46 | VALUE method_atomic_fixnum_decrement(int argc, VALUE* argv, VALUE self) { 47 | long long value = NUM2LL((VALUE) DATA_PTR(self)); 48 | long long delta = 1; 49 | rb_check_arity(argc, 0, 1); 50 | if (argc == 1) { 51 | Check_Type(argv[0], T_FIXNUM); 52 | delta = NUM2LL(argv[0]); 53 | } 54 | return method_atomic_fixnum_value_set(self, LL2NUM(value - delta)); 55 | } 56 | 57 | VALUE method_atomic_fixnum_compare_and_set(VALUE self, VALUE rb_expect, VALUE rb_update) { 58 | Check_Type(rb_expect, T_FIXNUM); 59 | Check_Type(rb_update, T_FIXNUM); 60 | return ir_compare_and_set(self, rb_expect, rb_update); 61 | } 62 | 63 | VALUE method_atomic_fixnum_update(VALUE self) { 64 | VALUE old_value, new_value; 65 | for (;;) { 66 | old_value = method_atomic_fixnum_value(self); 67 | new_value = rb_yield(old_value); 68 | if (ir_compare_and_set(self, old_value, new_value) == Qtrue) { 69 | return new_value; 70 | } 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /ext/concurrent-ruby-ext/atomic_fixnum.h: -------------------------------------------------------------------------------- 1 | #ifndef __ATOMIC_FIXNUM_H__ 2 | #define __ATOMIC_FIXNUM_H__ 3 | 4 | void atomic_fixnum_mark(void*); 5 | VALUE atomic_fixnum_allocate(VALUE); 6 | VALUE method_atomic_fixnum_initialize(int, VALUE*, VALUE); 7 | VALUE method_atomic_fixnum_value(VALUE); 8 | VALUE method_atomic_fixnum_value_set(VALUE, VALUE); 9 | VALUE method_atomic_fixnum_increment(int, VALUE*, VALUE); 10 | VALUE method_atomic_fixnum_decrement(int, VALUE*, VALUE); 11 | VALUE 
method_atomic_fixnum_compare_and_set(VALUE, VALUE, VALUE); 12 | VALUE method_atomic_fixnum_update(VALUE); 13 | 14 | #endif 15 | -------------------------------------------------------------------------------- /ext/concurrent-ruby-ext/atomic_reference.h: -------------------------------------------------------------------------------- 1 | #ifndef __ATOMIC_REFERENCE_H__ 2 | #define __ATOMIC_REFERENCE_H__ 3 | 4 | #if defined(__sun) 5 | #include 6 | #endif 7 | 8 | #ifdef HAVE_LIBKERN_OSATOMIC_H 9 | #include 10 | #endif 11 | 12 | void ir_mark(void*); 13 | VALUE ir_alloc(VALUE); 14 | VALUE ir_initialize(int, VALUE*, VALUE); 15 | VALUE ir_get(VALUE); 16 | VALUE ir_set(VALUE, VALUE); 17 | VALUE ir_get_and_set(VALUE, VALUE); 18 | VALUE ir_compare_and_set(volatile VALUE, VALUE, VALUE); 19 | 20 | #endif 21 | -------------------------------------------------------------------------------- /ext/concurrent-ruby-ext/extconf.rb: -------------------------------------------------------------------------------- 1 | require 'fileutils' 2 | require 'mkmf' 3 | 4 | unless RUBY_ENGINE == "ruby" 5 | File.write("Makefile", dummy_makefile($srcdir).join("")) 6 | exit 7 | end 8 | 9 | extension_name = 'concurrent_ruby_ext' 10 | 11 | dir_config(extension_name) 12 | have_header "libkern/OSAtomic.h" 13 | 14 | compiler_is_gcc = (CONFIG["GCC"] && !CONFIG["GCC"].empty?) || 15 | # This could stand to be more generic... but I am afraid. 16 | CONFIG["CC"] =~ /\bgcc\b/ 17 | 18 | if compiler_is_gcc 19 | case CONFIG["arch"] 20 | when /mswin32|mingw|solaris/ 21 | $CFLAGS += " -march=native" 22 | when 'i686-linux' 23 | $CFLAGS += " -march=i686" 24 | end 25 | end 26 | 27 | create_makefile File.join('concurrent', extension_name) 28 | -------------------------------------------------------------------------------- /ext/concurrent-ruby/ConcurrentRubyService.java: -------------------------------------------------------------------------------- 1 | import org.jruby.Ruby; 2 | import org.jruby.runtime.load.BasicLibraryService; 3 | 4 | import java.io.IOException; 5 | 6 | public class ConcurrentRubyService implements BasicLibraryService { 7 | 8 | public boolean basicLoad(final Ruby runtime) throws IOException { 9 | new com.concurrent_ruby.ext.AtomicReferenceLibrary().load(runtime, false); 10 | new com.concurrent_ruby.ext.JavaAtomicBooleanLibrary().load(runtime, false); 11 | new com.concurrent_ruby.ext.JavaAtomicFixnumLibrary().load(runtime, false); 12 | new com.concurrent_ruby.ext.JavaSemaphoreLibrary().load(runtime, false); 13 | new com.concurrent_ruby.ext.SynchronizationLibrary().load(runtime, false); 14 | new com.concurrent_ruby.ext.JRubyMapBackendLibrary().load(runtime, false); 15 | return true; 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /ext/concurrent-ruby/com/concurrent_ruby/ext/jsr166e/ConcurrentHashMap.java: -------------------------------------------------------------------------------- 1 | package com.concurrent_ruby.ext.jsr166e; 2 | 3 | import java.util.Map; 4 | import java.util.Set; 5 | 6 | public interface ConcurrentHashMap { 7 | /** Interface describing a function of one argument */ 8 | public interface Fun { T apply(A a); } 9 | /** Interface describing a function of two arguments */ 10 | public interface BiFun { T apply(A a, B b); } 11 | 12 | public V get(K key); 13 | public V put(K key, V value); 14 | public V putIfAbsent(K key, V value); 15 | public V computeIfAbsent(K key, Fun mf); 16 | public V computeIfPresent(K key, BiFun mf); 17 | public V compute(K key, BiFun mf); 
18 | public V merge(K key, V value, BiFun mf); 19 | public boolean replace(K key, V oldVal, V newVal); 20 | public V replace(K key, V value); 21 | public boolean containsKey(K key); 22 | public boolean remove(Object key, Object value); 23 | public V remove(K key); 24 | public void clear(); 25 | public Set> entrySet(); 26 | public int size(); 27 | public V getValueOrDefault(Object key, V defaultValue); 28 | 29 | public boolean containsValue(V value); 30 | public K findKey(V value); 31 | } 32 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent-edge.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent' 2 | 3 | require 'concurrent/edge/version' 4 | 5 | require 'concurrent/actor' 6 | require 'concurrent/agent' 7 | require 'concurrent/channel' 8 | require 'concurrent/lazy_register' 9 | require 'concurrent/executor/wrapping_executor' 10 | 11 | require 'concurrent/edge/lock_free_linked_set' 12 | require 'concurrent/edge/lock_free_queue' 13 | 14 | require 'concurrent/edge/cancellation' 15 | require 'concurrent/edge/throttle' 16 | require 'concurrent/edge/channel' 17 | 18 | require 'concurrent/edge/processing_actor' 19 | require 'concurrent/edge/erlang_actor' 20 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/actor/behaviour/abstract.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/concern/logging' 2 | require 'concurrent/actor/type_check' 3 | require 'concurrent/actor/internal_delegations' 4 | 5 | module Concurrent 6 | module Actor 7 | module Behaviour 8 | class Abstract 9 | include TypeCheck 10 | include InternalDelegations 11 | 12 | attr_reader :core, :subsequent 13 | 14 | def initialize(core, subsequent, core_options) 15 | @core = Type! core, Core 16 | @subsequent = Type! subsequent, Abstract, NilClass 17 | end 18 | 19 | # override to add extra behaviour 20 | # @note super needs to be called not to break the chain 21 | def on_envelope(envelope) 22 | pass envelope 23 | end 24 | 25 | # @param [Envelope] envelope to pass to {#subsequent} behaviour 26 | def pass(envelope) 27 | subsequent.on_envelope envelope 28 | end 29 | 30 | # override to add extra behaviour 31 | # @note super needs to be called not to break the chain 32 | def on_event(public, event) 33 | subsequent.on_event public, event if subsequent 34 | end 35 | 36 | # broadcasts event to all behaviours and context 37 | # @see #on_event 38 | # @see AbstractContext#on_event 39 | def broadcast(public, event) 40 | core.broadcast(public, event) 41 | end 42 | 43 | def reject_envelope(envelope) 44 | envelope.reject! ActorTerminated.new(reference) 45 | dead_letter_routing << envelope unless envelope.future 46 | log(DEBUG) { "rejected #{envelope.message} from #{envelope.sender_path}"} 47 | end 48 | end 49 | end 50 | end 51 | end 52 | 53 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/actor/behaviour/awaits.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/actor/behaviour/abstract' 2 | 3 | module Concurrent 4 | module Actor 5 | module Behaviour 6 | 7 | # Accepts `:await` messages. Which allows to wait on Actor to process all previously send 8 | # messages. 
9 | # 10 | # actor << :a << :b 11 | # actor.ask(:await).wait # blocks until :a and :b are processed 12 | class Awaits < Abstract 13 | def on_envelope(envelope) 14 | if envelope.message == :await 15 | true 16 | else 17 | pass envelope 18 | end 19 | end 20 | end 21 | end 22 | end 23 | end 24 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/actor/behaviour/errors_on_unknown_message.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/actor/behaviour/abstract' 2 | 3 | module Concurrent 4 | module Actor 5 | module Behaviour 6 | # Simply fails when message arrives here. It's usually the last behaviour. 7 | class ErrorsOnUnknownMessage < Abstract 8 | def on_envelope(envelope) 9 | raise UnknownMessage, envelope 10 | end 11 | end 12 | end 13 | end 14 | end 15 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/actor/behaviour/executes_context.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/actor/behaviour/abstract' 2 | 3 | module Concurrent 4 | module Actor 5 | module Behaviour 6 | # Delegates messages and events to {AbstractContext} instance. 7 | class ExecutesContext < Abstract 8 | def on_envelope(envelope) 9 | context.on_envelope envelope 10 | end 11 | 12 | def on_event(public, event) 13 | context.on_event(event) 14 | super public, event 15 | end 16 | end 17 | end 18 | end 19 | end 20 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/actor/behaviour/removes_child.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/actor/behaviour/abstract' 2 | 3 | module Concurrent 4 | module Actor 5 | module Behaviour 6 | # Removes terminated children. 7 | class RemovesChild < Abstract 8 | def on_envelope(envelope) 9 | if envelope.message == :remove_child 10 | core.remove_child envelope.sender 11 | else 12 | pass envelope 13 | end 14 | end 15 | end 16 | end 17 | end 18 | end 19 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/actor/behaviour/sets_results.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/actor/behaviour/abstract' 2 | 3 | module Concurrent 4 | module Actor 5 | module Behaviour 6 | # Collects returning value and sets the ResolvableFuture in the {Envelope} or error on failure. 7 | class SetResults < Abstract 8 | attr_reader :error_strategy 9 | 10 | def initialize(core, subsequent, core_options, error_strategy) 11 | super core, subsequent, core_options 12 | @error_strategy = Match! error_strategy, :just_log, :terminate!, :pause! 13 | end 14 | 15 | def on_envelope(envelope) 16 | result = pass envelope 17 | if result != MESSAGE_PROCESSED && !envelope.future.nil? 18 | envelope.future.fulfill result 19 | log(DEBUG) { "finished processing of #{envelope.message.inspect}"} 20 | end 21 | nil 22 | rescue => error 23 | log ERROR, error 24 | case error_strategy 25 | when :terminate! 26 | terminate! 27 | when :pause! 28 | behaviour!(Pausing).pause!(error) 29 | when :just_log 30 | # nothing 31 | else 32 | raise 33 | end 34 | envelope.future.reject error unless envelope.future.nil? 
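# Illustrative effect of the rescue path above (a hypothetical sketch, not part of
# the original file): when the context raises, an asker still gets an answer,
# because the envelope's future is rejected with the error:
#
#   failing = Concurrent::Actor::Utils::AdHoc.spawn(:failing) { -> _ { raise 'boom' } }
#   failing.ask(:anything).reason # => #<RuntimeError: boom>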
35 | end 36 | end 37 | end 38 | end 39 | end 40 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/actor/behaviour/supervising.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/actor/behaviour/abstract' 2 | 3 | module Concurrent 4 | module Actor 5 | module Behaviour 6 | 7 | # Handles supervised actors. Handle configures what to do with failed child: :terminate!, :resume!, :reset!, 8 | # or :restart!. Strategy sets :one_for_one (restarts just failed actor) or :one_for_all (restarts all child actors). 9 | # @note TODO missing example 10 | # @note this will change in next version to support supervision trees better 11 | class Supervising < Abstract 12 | def initialize(core, subsequent, core_options, handle, strategy) 13 | super core, subsequent, core_options 14 | @handle = Match! handle, :terminate!, :resume!, :reset!, :restart! 15 | @strategy = case @handle 16 | when :terminate! 17 | Match! strategy, nil 18 | when :resume! 19 | Match! strategy, :one_for_one 20 | when :reset!, :restart! 21 | Match! strategy, :one_for_one, :one_for_all 22 | end 23 | end 24 | 25 | def on_envelope(envelope) 26 | case envelope.message 27 | when Exception, :paused 28 | receivers = if @strategy == :one_for_all 29 | children 30 | else 31 | [envelope.sender] 32 | end 33 | receivers.each { |ch| ch << @handle } 34 | else 35 | pass envelope 36 | end 37 | end 38 | end 39 | end 40 | end 41 | end 42 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/actor/default_dead_letter_handler.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/actor/context' 2 | 3 | module Concurrent 4 | module Actor 5 | class DefaultDeadLetterHandler < RestartingContext 6 | def on_message(dead_letter) 7 | log(INFO) { "got dead letter #{dead_letter.inspect}"} 8 | end 9 | end 10 | end 11 | end 12 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/actor/envelope.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/actor/type_check' 2 | 3 | module Concurrent 4 | module Actor 5 | class Envelope 6 | include TypeCheck 7 | 8 | # @!attribute [r] message 9 | # @return [Object] a message 10 | # @!attribute [r] future 11 | # @return [Edge::Future] a future which becomes resolved after message is processed 12 | # @!attribute [r] sender 13 | # @return [Reference, Thread] an actor or thread sending the message 14 | # @!attribute [r] address 15 | # @return [Reference] where this message will be delivered 16 | 17 | attr_reader :message, :future, :sender, :address 18 | 19 | def initialize(message, future, sender, address) 20 | @message = message 21 | @future = Type! future, Promises::ResolvableFuture, NilClass 22 | @sender = Type! sender, Reference, Thread 23 | @address = Type! address, Reference 24 | end 25 | 26 | def sender_path 27 | if sender.is_a? Reference 28 | sender.path 29 | else 30 | sender.to_s 31 | end 32 | end 33 | 34 | def address_path 35 | address.path 36 | end 37 | 38 | def reject!(error) 39 | future.reject error unless future.nil? 
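# Illustrative note on the guard above (assumed, not part of the original file):
# envelopes sent with `tell`/`<<` carry no future, so reject! silently discards the
# error for them, while envelopes created by `ask` carry a ResolvableFuture that is
# rejected here so the asking side sees the failure:
#
#   actor << :msg    # fire-and-forget, nothing to reject
#   actor.ask(:msg)  # returns a future that this method would reject with `error`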
40 | end 41 | end 42 | end 43 | end 44 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/actor/errors.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/actor/type_check' 2 | 3 | module Concurrent 4 | module Actor 5 | Error = Class.new(StandardError) 6 | 7 | class ActorTerminated < Error 8 | include TypeCheck 9 | 10 | attr_reader :reference 11 | 12 | def initialize(reference) 13 | @reference = Type! reference, Reference 14 | super reference.path 15 | end 16 | end 17 | 18 | class UnknownMessage < Error 19 | include TypeCheck 20 | 21 | attr_reader :envelope 22 | 23 | def initialize(envelope) 24 | @envelope = Type! envelope, Envelope 25 | super "#{envelope.message.inspect} from #{envelope.sender_path}" 26 | end 27 | end 28 | end 29 | end 30 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/actor/internal_delegations.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/concern/logging' 2 | require 'concurrent/actor/public_delegations' 3 | 4 | module Concurrent 5 | module Actor 6 | module InternalDelegations 7 | include PublicDelegations 8 | include Concurrent::Concern::Logging 9 | 10 | # @see Core#children 11 | def children 12 | core.children 13 | end 14 | 15 | # @see Termination#terminate! 16 | def terminate!(reason = nil) 17 | behaviour!(Behaviour::Termination).terminate!(reason) 18 | end 19 | 20 | # @see Termination#terminated? 21 | def terminated? 22 | behaviour!(Behaviour::Termination).terminated? 23 | end 24 | 25 | # # @see Termination#reason 26 | # def reason 27 | # behaviour!(Behaviour::Termination).reason 28 | # end 29 | 30 | # delegates to core.log 31 | # @see Logging#log 32 | def log(level, message = nil, &block) 33 | core.log(level, message, &block) 34 | end 35 | 36 | # @see AbstractContext#dead_letter_routing 37 | def dead_letter_routing 38 | context.dead_letter_routing 39 | end 40 | 41 | def redirect(reference, envelope = self.envelope) 42 | reference.message(envelope.message, envelope.future) 43 | Behaviour::MESSAGE_PROCESSED 44 | end 45 | 46 | # @return [AbstractContext] 47 | def context 48 | core.context 49 | end 50 | 51 | # see Core#behaviour 52 | def behaviour(behaviour_class) 53 | core.behaviour(behaviour_class) 54 | end 55 | 56 | # see Core#behaviour! 57 | def behaviour!(behaviour_class) 58 | core.behaviour!(behaviour_class) 59 | end 60 | 61 | end 62 | end 63 | end 64 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/actor/public_delegations.rb: -------------------------------------------------------------------------------- 1 | module Concurrent 2 | module Actor 3 | 4 | # Provides publicly expose-able methods from {Core}. 
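#
# Illustrative usage (assumed, not part of the original file): `Reference` includes this
# module, so a spawned actor's metadata can be read without reaching into its Core:
#
#   echo = Concurrent::Actor::Utils::AdHoc.spawn(:echo) { -> msg { msg } }
#   echo.name          # the registered name, "echo"
#   echo.path          # its path in the actor hierarchy, e.g. "/echo"
#   echo.context_class # => Concurrent::Actor::Utils::AdHoc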
5 | module PublicDelegations 6 | # @see Core#name 7 | def name 8 | core.name 9 | end 10 | 11 | # @see Core#path 12 | def path 13 | core.path 14 | end 15 | 16 | # @see Core#parent 17 | def parent 18 | core.parent 19 | end 20 | 21 | # @see Core#reference 22 | def reference 23 | core.reference 24 | end 25 | 26 | # @see Core#executor 27 | def executor 28 | core.executor 29 | end 30 | 31 | # @see Core#context_class 32 | def context_class 33 | core.context_class 34 | end 35 | 36 | alias_method :ref, :reference 37 | alias_method :actor_class, :context_class 38 | end 39 | end 40 | end 41 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/actor/root.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/actor/context' 2 | require 'concurrent/actor/core' 3 | 4 | module Concurrent 5 | module Actor 6 | # implements the root actor 7 | class Root < AbstractContext 8 | 9 | def initialize 10 | # noinspection RubyArgCount 11 | @dead_letter_router = Core.new(parent: reference, 12 | class: DefaultDeadLetterHandler, 13 | supervise: true, 14 | name: :default_dead_letter_handler).reference 15 | end 16 | 17 | # to allow spawning of new actors, spawn needs to be called inside the parent Actor 18 | def on_message(message) 19 | case 20 | when message.is_a?(::Array) && message.first == :spawn 21 | Actor.spawn message[1], &message[2] 22 | when message == :dead_letter_routing 23 | @dead_letter_router 24 | else 25 | # ignore 26 | end 27 | end 28 | 29 | def dead_letter_routing 30 | @dead_letter_router 31 | end 32 | 33 | def behaviour_definition 34 | [*Behaviour.base(:just_log), 35 | *Behaviour.supervising, 36 | *Behaviour.user_messages] 37 | end 38 | end 39 | end 40 | end 41 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/actor/type_check.rb: -------------------------------------------------------------------------------- 1 | module Concurrent 2 | module Actor 3 | 4 | # taken from Algebrick 5 | # supplies type-checking helpers whenever included 6 | module TypeCheck 7 | 8 | def Type?(value, *types) 9 | types.any? { |t| value.is_a? t } 10 | end 11 | 12 | def Type!(value, *types) 13 | Type?(value, *types) or 14 | TypeCheck.error(value, 'is not', types) 15 | value 16 | end 17 | 18 | def Match?(value, *types) 19 | types.any? { |t| t === value } 20 | end 21 | 22 | def Match!(value, *types) 23 | Match?(value, *types) or 24 | TypeCheck.error(value, 'is not matching', types) 25 | value 26 | end 27 | 28 | def Child?(value, *types) 29 | Type?(value, Class) && 30 | types.any? { |t| value <= t } 31 | end 32 | 33 | def Child!(value, *types) 34 | Child?(value, *types) or 35 | TypeCheck.error(value, 'is not child', types) 36 | value 37 | end 38 | 39 | private 40 | 41 | def self.error(value, message, types) 42 | raise TypeError, 43 | "Value (#{value.class}) '#{value}' #{message} any of: #{types.join('; ')}." 
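# Quick illustration of the helpers above (an assumed sketch, not part of the original
# file); each bang method returns the value when the check passes and raises TypeError
# via this error helper otherwise:
#
#   include Concurrent::Actor::TypeCheck
#
#   Type! 'a', String, Symbol  # => "a"
#   Type! 1, String, Symbol    # raises TypeError
#   Match! :foo, Symbol, /ba/  # => :foo (uses ===, so classes and regexps both work)
#   Child! Integer, Numeric    # => Integer (Integer is a subclass of Numeric)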
44 | end 45 | end 46 | end 47 | end 48 | 49 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/actor/utils.rb: -------------------------------------------------------------------------------- 1 | module Concurrent 2 | module Actor 3 | module Utils 4 | require 'concurrent/actor/utils/ad_hoc' 5 | require 'concurrent/actor/utils/broadcast' 6 | require 'concurrent/actor/utils/balancer' 7 | require 'concurrent/actor/utils/pool' 8 | end 9 | end 10 | end 11 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/actor/utils/ad_hoc.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/actor/context' 2 | 3 | module Concurrent 4 | module Actor 5 | module Utils 6 | 7 | module AsAdHoc 8 | def initialize(*args, &initializer) 9 | @on_message = Type! initializer.call(*args), Proc 10 | end 11 | 12 | def on_message(message) 13 | instance_exec message, &@on_message 14 | end 15 | end 16 | 17 | # Allows quick creation of actors with behaviour defined by blocks. 18 | # @example ping 19 | # AdHoc.spawn :forward, an_actor do |where| 20 | # # this block has to return proc defining #on_message behaviour 21 | # -> message { where.tell message } 22 | # end 23 | class AdHoc < Context 24 | include AsAdHoc 25 | end 26 | end 27 | end 28 | end 29 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/actor/utils/balancer.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/actor/context' 2 | 3 | module Concurrent 4 | module Actor 5 | module Utils 6 | 7 | # Distributes messages between subscribed actors. Each actor'll get only one message then 8 | # it's unsubscribed. The actor needs to resubscribe when it's ready to receive next message. 9 | # It will buffer the messages if there is no worker registered. 10 | # @see Pool 11 | class Balancer < RestartingContext 12 | 13 | def initialize 14 | @receivers = [] 15 | @buffer = [] 16 | end 17 | 18 | def on_message(message) 19 | command, who = message 20 | case command 21 | when :subscribe 22 | @receivers << (who || envelope.sender) 23 | distribute 24 | true 25 | when :unsubscribe 26 | @receivers.delete(who || envelope.sender) 27 | true 28 | when :subscribed? 29 | @receivers.include?(who || envelope.sender) 30 | else 31 | @buffer << envelope 32 | distribute 33 | Behaviour::MESSAGE_PROCESSED 34 | end 35 | end 36 | 37 | def distribute 38 | while !@receivers.empty? && !@buffer.empty? 39 | redirect @receivers.shift, @buffer.shift 40 | end 41 | end 42 | end 43 | end 44 | end 45 | end 46 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/actor/utils/broadcast.rb: -------------------------------------------------------------------------------- 1 | require 'set' 2 | require 'concurrent/actor/context' 3 | 4 | module Concurrent 5 | module Actor 6 | module Utils 7 | 8 | # Allows to build pub/sub easily. 9 | # @example news 10 | # news_channel = Concurrent::Actor::Utils::Broadcast.spawn :news 11 | # 12 | # 2.times do |i| 13 | # Concurrent::Actor::Utils::AdHoc.spawn "listener-#{i}" do 14 | # news_channel << :subscribe 15 | # -> message { puts message } 16 | # end 17 | # end 18 | # 19 | # news_channel << 'Ruby rocks!' 20 | # # prints: 'Ruby rocks!' 
twice 21 | class Broadcast < RestartingContext 22 | 23 | def initialize 24 | @receivers = Set.new 25 | end 26 | 27 | def on_message(message) 28 | case message 29 | when :subscribe 30 | if envelope.sender.is_a? Reference 31 | @receivers.add envelope.sender 32 | true 33 | else 34 | false 35 | end 36 | when :unsubscribe 37 | !!@receivers.delete(envelope.sender) 38 | when :subscribed? 39 | @receivers.include? envelope.sender 40 | else 41 | filtered_receivers.each { |r| r << message } 42 | end 43 | end 44 | 45 | # override to define different behaviour, filtering etc 46 | def filtered_receivers 47 | @receivers 48 | end 49 | end 50 | 51 | end 52 | end 53 | end 54 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/channel/buffer.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/channel/buffer/base' 2 | 3 | require 'concurrent/channel/buffer/buffered' 4 | require 'concurrent/channel/buffer/dropping' 5 | require 'concurrent/channel/buffer/sliding' 6 | require 'concurrent/channel/buffer/unbuffered' 7 | 8 | require 'concurrent/channel/buffer/ticker' 9 | require 'concurrent/channel/buffer/timer' 10 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/channel/buffer/dropping.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/channel/buffer/base' 2 | require 'concurrent/channel/buffer/buffered' 3 | 4 | module Concurrent 5 | class Channel 6 | module Buffer 7 | 8 | # A non-blocking, buffered buffer of fixed maximum capacity. When the 9 | # maximum capacity is reached subsequent {#put} and {#offer} operations 10 | # will complete but the `put` item will be discarded; no transfer will 11 | # occur. 12 | class Dropping < Buffered 13 | 14 | # @!method put(item) 15 | # @!macro channel_buffer_put 16 | # 17 | # When the buffer is full, this method will return `true` 18 | # immediately but the item will be discarded. The item will *not* 19 | # be placed into the buffer (no transfer will occur). 20 | 21 | # @!method offer(item) 22 | # @!macro channel_buffer_offer 23 | # 24 | # When the buffer is full, this method will return `true` 25 | # immediately but the item will be discarded. The item will *not* 26 | # be placed into the buffer (no transfer will occur). 27 | 28 | # @!method full? 29 | # @!macro channel_buffer_full_question 30 | # 31 | # Always returns `false`. 32 | 33 | # @!macro channel_buffer_blocking_question 34 | # 35 | # Always returns `false`. 36 | def blocking? 37 | false 38 | end 39 | 40 | private 41 | 42 | # @!macro channel_buffer_full_question 43 | def ns_full? 44 | false 45 | end 46 | 47 | # @!macro channel_buffer_put 48 | def ns_put_onto_buffer(item) 49 | buffer.push(item) unless buffer.size == capacity 50 | end 51 | end 52 | end 53 | end 54 | end 55 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/channel/buffer/sliding.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/channel/buffer/base' 2 | require 'concurrent/channel/buffer/buffered' 3 | 4 | module Concurrent 5 | class Channel 6 | module Buffer 7 | 8 | # A non-blocking, buffered buffer of fixed maximum capacity. 
When the 9 | # maximum capacity is reached subsequent {#put} and {#offer} operations 10 | # will complete and the item will be `put`, but the oldest elements in 11 | # the buffer will be discarded (not transferred). 12 | class Sliding < Buffered 13 | 14 | # @!method put(item) 15 | # @!macro channel_buffer_put 16 | # 17 | # When the buffer is full, this method will return `true` 18 | # immediately and the item will be inserted, but the oldest 19 | # elements in the buffer will be discarded (not transferred). 20 | 21 | # @!method offer(item) 22 | # @!macro channel_buffer_offer 23 | # 24 | # When the buffer is full, this method will return `true` 25 | # immediately and the item will be inserted, but the oldest 26 | # elements in the buffer will be discarded (not transferred). 27 | 28 | # @!method full? 29 | # @!macro channel_buffer_full_question 30 | # 31 | # Always returns `false`. 32 | 33 | # @!macro channel_buffer_blocking_question 34 | # 35 | # Always returns `false`. 36 | def blocking? 37 | false 38 | end 39 | 40 | private 41 | 42 | # @!macro channel_buffer_full_question 43 | def ns_full? 44 | false 45 | end 46 | 47 | # @!macro channel_buffer_put 48 | def ns_put_onto_buffer(item) 49 | buffer.shift if buffer.size == capacity 50 | buffer.push(item) 51 | end 52 | end 53 | end 54 | end 55 | end 56 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/channel/buffer/ticker.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/constants' 2 | require 'concurrent/utility/monotonic_time' 3 | require 'concurrent/channel/tick' 4 | require 'concurrent/channel/buffer/timer' 5 | 6 | module Concurrent 7 | class Channel 8 | module Buffer 9 | 10 | class Ticker < Timer 11 | 12 | private 13 | 14 | def ns_initialize(interval) 15 | @interval = interval.to_f 16 | @next_tick = Concurrent.monotonic_time + interval 17 | self.capacity = 1 18 | end 19 | 20 | def do_poll 21 | synchronize do 22 | if ns_closed? 23 | return Concurrent::NULL, false 24 | elsif (now = Concurrent.monotonic_time) >= @next_tick 25 | tick = Concurrent::Channel::Tick.new(@next_tick) 26 | @next_tick = now + @interval 27 | return tick, true 28 | else 29 | return nil, true 30 | end 31 | end 32 | end 33 | end 34 | end 35 | end 36 | end 37 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/channel/buffer/timer.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/constants' 2 | require 'concurrent/utility/monotonic_time' 3 | require 'concurrent/channel/tick' 4 | require 'concurrent/channel/buffer/base' 5 | 6 | module Concurrent 7 | class Channel 8 | module Buffer 9 | 10 | class Timer < Base 11 | 12 | def put(item) 13 | false 14 | end 15 | 16 | def offer(item) 17 | false 18 | end 19 | 20 | def take 21 | loop do 22 | tick, _ = do_poll 23 | if tick 24 | return tick 25 | else 26 | Thread.pass 27 | end 28 | end 29 | end 30 | 31 | def next 32 | loop do 33 | tick, more = do_poll 34 | return tick, more if tick 35 | Thread.pass 36 | end 37 | end 38 | 39 | def poll 40 | tick, _ = do_poll 41 | tick = Concurrent::NULL unless tick 42 | tick 43 | end 44 | 45 | private 46 | 47 | def ns_initialize(delay) 48 | @tick = Concurrent.monotonic_time + delay.to_f 49 | self.capacity = 1 50 | end 51 | 52 | def ns_size 53 | 0 54 | end 55 | 56 | def ns_empty? 57 | false 58 | end 59 | 60 | def ns_full? 
61 | true 62 | end 63 | 64 | def do_poll 65 | synchronize do 66 | if ns_closed? 67 | return Concurrent::NULL, false 68 | elsif Concurrent.monotonic_time >= @tick 69 | # only one listener gets notified 70 | self.closed = true 71 | return Concurrent::Channel::Tick.new(@tick), false 72 | else 73 | return nil, true 74 | end 75 | end 76 | end 77 | end 78 | end 79 | end 80 | end 81 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/channel/selector/after_clause.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/maybe' 2 | require 'concurrent/utility/monotonic_time' 3 | 4 | module Concurrent 5 | class Channel 6 | class Selector 7 | 8 | class AfterClause 9 | 10 | def initialize(seconds, block) 11 | raise ArgumentError.new('timeout must 0.0 or more') if seconds.to_f < 0.0 12 | @end = Concurrent.monotonic_time + seconds.to_f 13 | @block = block 14 | end 15 | 16 | def execute 17 | if Concurrent.monotonic_time > @end 18 | result = @block ? @block.call : nil 19 | Concurrent::Maybe.just(result) 20 | else 21 | Concurrent::Maybe.nothing 22 | end 23 | end 24 | end 25 | end 26 | end 27 | end 28 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/channel/selector/default_clause.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/maybe' 2 | 3 | module Concurrent 4 | class Channel 5 | class Selector 6 | 7 | class DefaultClause 8 | 9 | def initialize(block) 10 | @block = block 11 | end 12 | 13 | def execute 14 | Concurrent::Maybe.just(@block.call) 15 | end 16 | end 17 | end 18 | end 19 | end 20 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/channel/selector/error_clause.rb: -------------------------------------------------------------------------------- 1 | module Concurrent 2 | class Channel 3 | class Selector 4 | 5 | class ErrorClause 6 | 7 | def initialize(block) 8 | @block = block 9 | end 10 | 11 | def execute(error) 12 | @block.call(error) 13 | rescue 14 | # suppress and move on 15 | ensure 16 | return nil 17 | end 18 | end 19 | end 20 | end 21 | end 22 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/channel/selector/put_clause.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/maybe' 2 | 3 | module Concurrent 4 | class Channel 5 | class Selector 6 | 7 | class PutClause 8 | 9 | def initialize(channel, message, block) 10 | @channel = channel 11 | @message = message 12 | @block = block 13 | end 14 | 15 | def execute 16 | if @channel.offer?(@message).just? 17 | result = @block ? @block.call : nil 18 | Concurrent::Maybe.just(result) 19 | else 20 | Concurrent::Maybe.nothing 21 | end 22 | end 23 | end 24 | end 25 | end 26 | end 27 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/channel/selector/take_clause.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/maybe' 2 | 3 | module Concurrent 4 | class Channel 5 | class Selector 6 | 7 | class TakeClause 8 | 9 | def initialize(channel, block) 10 | @channel = channel 11 | @block = block 12 | end 13 | 14 | def execute 15 | if (result = @channel.poll?).just? 
16 | Concurrent::Maybe.just(@block.call(result.value)) 17 | else 18 | Concurrent::Maybe.nothing 19 | end 20 | end 21 | end 22 | end 23 | end 24 | end 25 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/channel/tick.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/synchronization/object' 2 | require 'concurrent/utility/monotonic_time' 3 | 4 | module Concurrent 5 | class Channel 6 | 7 | # A convenience class representing a single moment in monotonic time. 8 | # Returned by {Concurrent::Channel} tickers and timers when they 9 | # resolve. 10 | # 11 | # Includes `Comparable` and can be compared to monotonic_time, UTC 12 | # time, or epoch time. 13 | # 14 | # @see Concurrent.monotonic_time 15 | # @see Concurrent::Channel.ticker 16 | # @see Concurrent::Channel.timer 17 | class Tick < Synchronization::Object 18 | include Comparable 19 | safe_initialization! 20 | 21 | STRING_FORMAT = '%F %T.%6N %z %Z'.freeze 22 | 23 | attr_reader :monotonic, :utc 24 | 25 | def initialize(tick = Concurrent.monotonic_time) 26 | @monotonic = tick 27 | @utc = monotonic_to_utc(tick).freeze 28 | end 29 | 30 | def epoch 31 | @utc.to_f 32 | end 33 | 34 | def to_s 35 | @utc.strftime(STRING_FORMAT) 36 | end 37 | 38 | def <=>(other) 39 | if other.is_a? Numeric 40 | @monotonic <=> other 41 | elsif other.is_a? Time 42 | @utc <=> other.utc 43 | elsif other.is_a? Tick 44 | @monotonic <=> other.monotonic 45 | else 46 | nil 47 | end 48 | end 49 | 50 | private 51 | 52 | def monotonic_to_utc(tick) 53 | Time.now.utc + Concurrent.monotonic_time - tick 54 | end 55 | end 56 | end 57 | end 58 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/edge.rb: -------------------------------------------------------------------------------- 1 | module Concurrent 2 | 3 | # A submodule for unstable, highly experimental features that are likely to 4 | # change often and which may never become part of the core gem. Also for 5 | # new, experimental version of abstractions already in the core gem. 6 | # 7 | # Most new features should start in this module, clearly indicating the 8 | # experimental and unstable nature of the feature. Once a feature becomes 9 | # more stable and is a candidate for inclusion in the core gem it should 10 | # be moved up to the `Concurrent` module, where it would reside once merged 11 | # into the core gem. 12 | # 13 | # The only exception to this is for features which *replace* features from 14 | # the core gem in ways that are breaking and not backward compatible. These 15 | # features should remain in this module until merged into the core gem. This 16 | # will prevent namespace collisions. 17 | # 18 | # @!macro warn.edge 19 | module Edge 20 | end 21 | end 22 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set/window.rb: -------------------------------------------------------------------------------- 1 | module Concurrent 2 | module Edge 3 | class LockFreeLinkedSet 4 | class Window 5 | attr_accessor :pred, :curr 6 | 7 | def initialize(pred, curr) 8 | @pred, @curr = pred, curr 9 | end 10 | 11 | # This method is used to find a 'window' for which `add` and `remove` 12 | # methods can use to know where to add and remove from the list. 
However, 13 | # it has another responsibility, which is to physically unlink any 14 | # nodes marked for removal in the set. This prevents adds/removes from 15 | # having to retraverse the list to physically unlink nodes. 16 | def self.find(head, item) 17 | loop do 18 | break_inner_loops = false 19 | pred = head 20 | curr = pred.next_node 21 | 22 | loop do 23 | succ, marked = curr.successor_reference.get 24 | 25 | # Remove sequence of marked nodes 26 | while marked 27 | removed = pred.successor_reference.compare_and_set curr, succ, false, false 28 | 29 | # If the node could not be removed, try again 30 | break_inner_loops = true && break unless removed 31 | 32 | curr = succ 33 | succ, marked = curr.successor_reference.get 34 | end 35 | 36 | break if break_inner_loops 37 | 38 | # We have found a window 39 | return new pred, curr if curr >= item 40 | 41 | pred = curr 42 | curr = succ 43 | end 44 | end 45 | end 46 | end 47 | end 48 | end 49 | end 50 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/edge/old_channel_integration.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/promises' 2 | 3 | module Concurrent 4 | module Promises 5 | module FactoryMethods 6 | 7 | # @!visibility private 8 | 9 | module OldChannelIntegration 10 | 11 | # @!visibility private 12 | 13 | # only proof of concept 14 | # @return [Future] 15 | def select(*channels) 16 | # TODO (pitr-ch 26-Mar-2016): re-do, has to be non-blocking 17 | future do 18 | # noinspection RubyArgCount 19 | Channel.select do |s| 20 | channels.each do |ch| 21 | s.take(ch) { |value| [value, ch] } 22 | end 23 | end 24 | end 25 | end 26 | end 27 | 28 | include OldChannelIntegration 29 | end 30 | 31 | class Future < AbstractEventFuture 32 | 33 | # @!visibility private 34 | 35 | module OldChannelIntegration 36 | 37 | # @!visibility private 38 | 39 | # Zips with the selected value from the supplied channels 40 | # @return [Future] 41 | def then_select(*channels) 42 | future = Concurrent::Promises.select(*channels) 43 | ZipFuturesPromise.new_blocked_by2(self, future, @DefaultExecutor).future 44 | end 45 | 46 | # @note may block 47 | # @note only proof of concept 48 | def then_put(channel) 49 | on_fulfillment_using(:io, channel) { |value, channel| channel.put value } 50 | end 51 | end 52 | 53 | include OldChannelIntegration 54 | end 55 | end 56 | end 57 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/edge/version.rb: -------------------------------------------------------------------------------- 1 | module Concurrent 2 | EDGE_VERSION = '0.7.2' 3 | end 4 | -------------------------------------------------------------------------------- /lib/concurrent-ruby-edge/concurrent/executor/wrapping_executor.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/synchronization/object' 2 | require 'concurrent/executor/executor_service' 3 | 4 | module Concurrent 5 | 6 | # A delegating executor which modifies each task with arguments 7 | # before the task is given to the target executor it delegates to.
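# The wrapper block supplied to `.new` is invoked on every `#post` with the
# submitted arguments and task; the `[*args, task]` pair it returns is what
# actually gets posted to the wrapped executor, as the example below shows.
#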
8 | # @example Count task executions 9 | # counter = AtomicFixnum.new 10 | # count_executions = WrappingExecutor.new Concurrent.global_io_executor do |*args, &task| 11 | # [*args, -> *args { counter.increment; task.call *args }] 12 | # end 13 | # 10.times { count_executions.post { :do_something } } 14 | # sleep 0.01 15 | # counter.value #=> 10 16 | class WrappingExecutor < Synchronization::Object 17 | safe_initialization! 18 | 19 | include ExecutorService 20 | 21 | # @param [Executor] executor an executor to delegate the tasks to 22 | # @yield [*args, &task] A function which can modify the task with arguments 23 | # @yieldparam [Array] *args the arguments submitted with the tasks 24 | # @yieldparam [block] &task the task submitted to the executor to be modified 25 | # @yieldreturn [Array] a new arguments and task `[*args, task]` which are submitted to the target executor 26 | def initialize(executor, &wrapper) 27 | super() 28 | @Wrapper = wrapper 29 | @Executor = executor 30 | end 31 | 32 | # @!macro executor_service_method_post 33 | # 34 | # @see #initialize how the tasks can be modified 35 | def post(*args, &task) 36 | *args, task = @Wrapper.call(*args, &task) 37 | @Executor.post(*args, &task) 38 | end 39 | 40 | # @!macro executor_service_method_can_overflow_question 41 | def can_overflow? 42 | @Executor.can_overflow? 43 | end 44 | 45 | # @!macro executor_service_method_serialized_question 46 | def serialized? 47 | @Executor.serialized? 48 | end 49 | end 50 | end 51 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent-ruby.rb: -------------------------------------------------------------------------------- 1 | # This file is here so that there is a file with the same name as the gem that 2 | # can be required by Bundler.require. Applications should normally 3 | # require 'concurrent'. 4 | 5 | require_relative "concurrent" 6 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/.gitignore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ruby-concurrency/concurrent-ruby/c8f0bae98f1233dde681441b85dbc29a869267bd/lib/concurrent-ruby/concurrent/.gitignore -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/atomic/java_count_down_latch.rb: -------------------------------------------------------------------------------- 1 | if Concurrent.on_jruby? 2 | require 'concurrent/utility/native_extension_loader' 3 | 4 | module Concurrent 5 | 6 | # @!macro count_down_latch 7 | # @!visibility private 8 | # @!macro internal_implementation_note 9 | class JavaCountDownLatch 10 | 11 | # @!macro count_down_latch_method_initialize 12 | def initialize(count = 1) 13 | Utility::NativeInteger.ensure_integer_and_bounds(count) 14 | Utility::NativeInteger.ensure_positive(count) 15 | @latch = java.util.concurrent.CountDownLatch.new(count) 16 | end 17 | 18 | # @!macro count_down_latch_method_wait 19 | def wait(timeout = nil) 20 | result = nil 21 | if timeout.nil? 
22 | Synchronization::JRuby.sleep_interruptibly { @latch.await } 23 | result = true 24 | else 25 | Synchronization::JRuby.sleep_interruptibly do 26 | result = @latch.await(1000 * timeout, java.util.concurrent.TimeUnit::MILLISECONDS) 27 | end 28 | end 29 | result 30 | end 31 | 32 | # @!macro count_down_latch_method_count_down 33 | def count_down 34 | @latch.countDown 35 | end 36 | 37 | # @!macro count_down_latch_method_count 38 | def count 39 | @latch.getCount 40 | end 41 | end 42 | end 43 | end 44 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/atomic/lock_local_var.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/utility/engine' 2 | require_relative 'fiber_local_var' 3 | require_relative 'thread_local_var' 4 | 5 | module Concurrent 6 | # @!visibility private 7 | def self.mutex_owned_per_thread? 8 | return false if Concurrent.on_jruby? || Concurrent.on_truffleruby? 9 | 10 | mutex = Mutex.new 11 | # Lock the mutex: 12 | mutex.synchronize do 13 | # Check if the mutex is still owned in a child fiber: 14 | Fiber.new { mutex.owned? }.resume 15 | end 16 | end 17 | 18 | if mutex_owned_per_thread? 19 | LockLocalVar = ThreadLocalVar 20 | else 21 | LockLocalVar = FiberLocalVar 22 | end 23 | 24 | # Either {FiberLocalVar} or {ThreadLocalVar} depending on whether Mutex (and Monitor) 25 | # are held, respectively, per Fiber or per Thread. 26 | class LockLocalVar 27 | end 28 | end 29 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/atomic/mutex_atomic_boolean.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/synchronization/safe_initialization' 2 | 3 | module Concurrent 4 | 5 | # @!macro atomic_boolean 6 | # @!visibility private 7 | # @!macro internal_implementation_note 8 | class MutexAtomicBoolean 9 | extend Concurrent::Synchronization::SafeInitialization 10 | 11 | # @!macro atomic_boolean_method_initialize 12 | def initialize(initial = false) 13 | super() 14 | @Lock = ::Mutex.new 15 | @value = !!initial 16 | end 17 | 18 | # @!macro atomic_boolean_method_value_get 19 | def value 20 | synchronize { @value } 21 | end 22 | 23 | # @!macro atomic_boolean_method_value_set 24 | def value=(value) 25 | synchronize { @value = !!value } 26 | end 27 | 28 | # @!macro atomic_boolean_method_true_question 29 | def true? 30 | synchronize { @value } 31 | end 32 | 33 | # @!macro atomic_boolean_method_false_question 34 | def false? 35 | synchronize { !@value } 36 | end 37 | 38 | # @!macro atomic_boolean_method_make_true 39 | def make_true 40 | synchronize { ns_make_value(true) } 41 | end 42 | 43 | # @!macro atomic_boolean_method_make_false 44 | def make_false 45 | synchronize { ns_make_value(false) } 46 | end 47 | 48 | protected 49 | 50 | # @!visibility private 51 | def synchronize 52 | if @Lock.owned? 
53 | yield 54 | else 55 | @Lock.synchronize { yield } 56 | end 57 | end 58 | 59 | private 60 | 61 | # @!visibility private 62 | def ns_make_value(value) 63 | old = @value 64 | @value = value 65 | old != @value 66 | end 67 | end 68 | end 69 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/atomic/mutex_atomic_fixnum.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/synchronization/safe_initialization' 2 | require 'concurrent/utility/native_integer' 3 | 4 | module Concurrent 5 | 6 | # @!macro atomic_fixnum 7 | # @!visibility private 8 | # @!macro internal_implementation_note 9 | class MutexAtomicFixnum 10 | extend Concurrent::Synchronization::SafeInitialization 11 | 12 | # @!macro atomic_fixnum_method_initialize 13 | def initialize(initial = 0) 14 | super() 15 | @Lock = ::Mutex.new 16 | ns_set(initial) 17 | end 18 | 19 | # @!macro atomic_fixnum_method_value_get 20 | def value 21 | synchronize { @value } 22 | end 23 | 24 | # @!macro atomic_fixnum_method_value_set 25 | def value=(value) 26 | synchronize { ns_set(value) } 27 | end 28 | 29 | # @!macro atomic_fixnum_method_increment 30 | def increment(delta = 1) 31 | synchronize { ns_set(@value + delta.to_i) } 32 | end 33 | 34 | alias_method :up, :increment 35 | 36 | # @!macro atomic_fixnum_method_decrement 37 | def decrement(delta = 1) 38 | synchronize { ns_set(@value - delta.to_i) } 39 | end 40 | 41 | alias_method :down, :decrement 42 | 43 | # @!macro atomic_fixnum_method_compare_and_set 44 | def compare_and_set(expect, update) 45 | synchronize do 46 | if @value == expect.to_i 47 | @value = update.to_i 48 | true 49 | else 50 | false 51 | end 52 | end 53 | end 54 | 55 | # @!macro atomic_fixnum_method_update 56 | def update 57 | synchronize do 58 | @value = yield @value 59 | end 60 | end 61 | 62 | protected 63 | 64 | # @!visibility private 65 | def synchronize 66 | if @Lock.owned? 
67 | yield 68 | else 69 | @Lock.synchronize { yield } 70 | end 71 | end 72 | 73 | private 74 | 75 | # @!visibility private 76 | def ns_set(value) 77 | Utility::NativeInteger.ensure_integer_and_bounds value 78 | @value = value 79 | end 80 | end 81 | end 82 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/atomic/mutex_count_down_latch.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/synchronization/lockable_object' 2 | require 'concurrent/utility/native_integer' 3 | 4 | module Concurrent 5 | 6 | # @!macro count_down_latch 7 | # @!visibility private 8 | # @!macro internal_implementation_note 9 | class MutexCountDownLatch < Synchronization::LockableObject 10 | 11 | # @!macro count_down_latch_method_initialize 12 | def initialize(count = 1) 13 | Utility::NativeInteger.ensure_integer_and_bounds count 14 | Utility::NativeInteger.ensure_positive count 15 | 16 | super() 17 | synchronize { ns_initialize count } 18 | end 19 | 20 | # @!macro count_down_latch_method_wait 21 | def wait(timeout = nil) 22 | synchronize { ns_wait_until(timeout) { @count == 0 } } 23 | end 24 | 25 | # @!macro count_down_latch_method_count_down 26 | def count_down 27 | synchronize do 28 | @count -= 1 if @count > 0 29 | ns_broadcast if @count == 0 30 | end 31 | end 32 | 33 | # @!macro count_down_latch_method_count 34 | def count 35 | synchronize { @count } 36 | end 37 | 38 | protected 39 | 40 | def ns_initialize(count) 41 | @count = count 42 | end 43 | end 44 | end 45 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/atomic_reference/atomic_direct_update.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/errors' 2 | 3 | module Concurrent 4 | 5 | # Define update methods that use direct paths 6 | # 7 | # @!visibility private 8 | # @!macro internal_implementation_note 9 | module AtomicDirectUpdate 10 | def update 11 | true until compare_and_set(old_value = get, new_value = yield(old_value)) 12 | new_value 13 | end 14 | 15 | def try_update 16 | old_value = get 17 | new_value = yield old_value 18 | 19 | return unless compare_and_set old_value, new_value 20 | 21 | new_value 22 | end 23 | 24 | def try_update! 
25 | old_value = get 26 | new_value = yield old_value 27 | unless compare_and_set(old_value, new_value) 28 | if $VERBOSE 29 | raise ConcurrentUpdateError, "Update failed" 30 | else 31 | raise ConcurrentUpdateError, "Update failed", ConcurrentUpdateError::CONC_UP_ERR_BACKTRACE 32 | end 33 | end 34 | new_value 35 | end 36 | end 37 | end 38 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/atomic_reference/mutex_atomic.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/atomic_reference/atomic_direct_update' 2 | require 'concurrent/atomic_reference/numeric_cas_wrapper' 3 | require 'concurrent/synchronization/safe_initialization' 4 | 5 | module Concurrent 6 | 7 | # @!visibility private 8 | # @!macro internal_implementation_note 9 | class MutexAtomicReference 10 | extend Concurrent::Synchronization::SafeInitialization 11 | include AtomicDirectUpdate 12 | include AtomicNumericCompareAndSetWrapper 13 | alias_method :compare_and_swap, :compare_and_set 14 | 15 | # @!macro atomic_reference_method_initialize 16 | def initialize(value = nil) 17 | super() 18 | @Lock = ::Mutex.new 19 | @value = value 20 | end 21 | 22 | # @!macro atomic_reference_method_get 23 | def get 24 | synchronize { @value } 25 | end 26 | alias_method :value, :get 27 | 28 | # @!macro atomic_reference_method_set 29 | def set(new_value) 30 | synchronize { @value = new_value } 31 | end 32 | alias_method :value=, :set 33 | 34 | # @!macro atomic_reference_method_get_and_set 35 | def get_and_set(new_value) 36 | synchronize do 37 | old_value = @value 38 | @value = new_value 39 | old_value 40 | end 41 | end 42 | alias_method :swap, :get_and_set 43 | 44 | # @!macro atomic_reference_method_compare_and_set 45 | def _compare_and_set(old_value, new_value) 46 | synchronize do 47 | if @value.equal? old_value 48 | @value = new_value 49 | true 50 | else 51 | false 52 | end 53 | end 54 | end 55 | 56 | protected 57 | 58 | # @!visibility private 59 | def synchronize 60 | if @Lock.owned? 61 | yield 62 | else 63 | @Lock.synchronize { yield } 64 | end 65 | end 66 | end 67 | end 68 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/atomic_reference/numeric_cas_wrapper.rb: -------------------------------------------------------------------------------- 1 | module Concurrent 2 | 3 | # Special "compare and set" handling of numeric values. 4 | # 5 | # @!visibility private 6 | # @!macro internal_implementation_note 7 | module AtomicNumericCompareAndSetWrapper 8 | 9 | # @!macro atomic_reference_method_compare_and_set 10 | def compare_and_set(old_value, new_value) 11 | if old_value.kind_of? Numeric 12 | while true 13 | old = get 14 | 15 | return false unless old.kind_of? 
Numeric 16 | 17 | return false unless old == old_value 18 | 19 | result = _compare_and_set(old, new_value) 20 | return result if result 21 | end 22 | else 23 | _compare_and_set(old_value, new_value) 24 | end 25 | end 26 | 27 | end 28 | end 29 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/atomics.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/atomic/atomic_reference' 2 | require 'concurrent/atomic/atomic_boolean' 3 | require 'concurrent/atomic/atomic_fixnum' 4 | require 'concurrent/atomic/cyclic_barrier' 5 | require 'concurrent/atomic/count_down_latch' 6 | require 'concurrent/atomic/event' 7 | require 'concurrent/atomic/read_write_lock' 8 | require 'concurrent/atomic/reentrant_read_write_lock' 9 | require 'concurrent/atomic/semaphore' 10 | require 'concurrent/atomic/thread_local_var' 11 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/collection/map/mri_map_backend.rb: -------------------------------------------------------------------------------- 1 | require 'thread' 2 | require 'concurrent/collection/map/non_concurrent_map_backend' 3 | 4 | module Concurrent 5 | 6 | # @!visibility private 7 | module Collection 8 | 9 | # @!visibility private 10 | class MriMapBackend < NonConcurrentMapBackend 11 | 12 | def initialize(options = nil, &default_proc) 13 | super(options, &default_proc) 14 | @write_lock = Mutex.new 15 | end 16 | 17 | def []=(key, value) 18 | @write_lock.synchronize { super } 19 | end 20 | 21 | def compute_if_absent(key) 22 | if NULL != (stored_value = @backend.fetch(key, NULL)) # fast non-blocking path for the most likely case 23 | stored_value 24 | else 25 | @write_lock.synchronize { super } 26 | end 27 | end 28 | 29 | def compute_if_present(key) 30 | @write_lock.synchronize { super } 31 | end 32 | 33 | def compute(key) 34 | @write_lock.synchronize { super } 35 | end 36 | 37 | def merge_pair(key, value) 38 | @write_lock.synchronize { super } 39 | end 40 | 41 | def replace_pair(key, old_value, new_value) 42 | @write_lock.synchronize { super } 43 | end 44 | 45 | def replace_if_exists(key, new_value) 46 | @write_lock.synchronize { super } 47 | end 48 | 49 | def get_and_set(key, value) 50 | @write_lock.synchronize { super } 51 | end 52 | 53 | def delete(key) 54 | @write_lock.synchronize { super } 55 | end 56 | 57 | def delete_pair(key, value) 58 | @write_lock.synchronize { super } 59 | end 60 | 61 | def clear 62 | @write_lock.synchronize { super } 63 | end 64 | end 65 | end 66 | end 67 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/collection/map/synchronized_map_backend.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/collection/map/non_concurrent_map_backend' 2 | 3 | module Concurrent 4 | 5 | # @!visibility private 6 | module Collection 7 | 8 | # @!visibility private 9 | class SynchronizedMapBackend < NonConcurrentMapBackend 10 | 11 | def initialize(*args, &block) 12 | super 13 | 14 | # WARNING: Mutex is a non-reentrant lock, so the synchronized methods are 15 | # not allowed to call each other. 
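# (For example, if #compute called #compute_if_absent on the same backend,
# the nested #synchronize on this non-reentrant Mutex would raise a
# ThreadError instead of locking recursively.)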
16 | @mutex = Mutex.new 17 | end 18 | 19 | def [](key) 20 | @mutex.synchronize { super } 21 | end 22 | 23 | def []=(key, value) 24 | @mutex.synchronize { super } 25 | end 26 | 27 | def compute_if_absent(key) 28 | @mutex.synchronize { super } 29 | end 30 | 31 | def compute_if_present(key) 32 | @mutex.synchronize { super } 33 | end 34 | 35 | def compute(key) 36 | @mutex.synchronize { super } 37 | end 38 | 39 | def merge_pair(key, value) 40 | @mutex.synchronize { super } 41 | end 42 | 43 | def replace_pair(key, old_value, new_value) 44 | @mutex.synchronize { super } 45 | end 46 | 47 | def replace_if_exists(key, new_value) 48 | @mutex.synchronize { super } 49 | end 50 | 51 | def get_and_set(key, value) 52 | @mutex.synchronize { super } 53 | end 54 | 55 | def key?(key) 56 | @mutex.synchronize { super } 57 | end 58 | 59 | def delete(key) 60 | @mutex.synchronize { super } 61 | end 62 | 63 | def delete_pair(key, value) 64 | @mutex.synchronize { super } 65 | end 66 | 67 | def clear 68 | @mutex.synchronize { super } 69 | end 70 | 71 | def size 72 | @mutex.synchronize { super } 73 | end 74 | 75 | def get_or_default(key, default_value) 76 | @mutex.synchronize { super } 77 | end 78 | 79 | private 80 | def dupped_backend 81 | @mutex.synchronize { super } 82 | end 83 | end 84 | end 85 | end 86 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/collection/map/truffleruby_map_backend.rb: -------------------------------------------------------------------------------- 1 | module Concurrent 2 | 3 | # @!visibility private 4 | module Collection 5 | 6 | # @!visibility private 7 | class TruffleRubyMapBackend < TruffleRuby::ConcurrentMap 8 | def initialize(options = nil) 9 | options ||= {} 10 | super(initial_capacity: options[:initial_capacity], load_factor: options[:load_factor]) 11 | end 12 | end 13 | end 14 | end 15 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/concern/deprecation.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/concern/logging' 2 | 3 | module Concurrent 4 | module Concern 5 | 6 | # @!visibility private 7 | # @!macro internal_implementation_note 8 | module Deprecation 9 | # TODO require additional parameter: a version. Display when it'll be removed based on that. Error if not removed. 10 | include Concern::Logging 11 | 12 | def deprecated(message, strip = 2) 13 | caller_line = caller(strip).first if strip > 0 14 | klass = if Module === self 15 | self 16 | else 17 | self.class 18 | end 19 | message = if strip > 0 20 | format("[DEPRECATED] %s\ncalled on: %s", message, caller_line) 21 | else 22 | format('[DEPRECATED] %s', message) 23 | end 24 | log WARN, klass.to_s, message 25 | end 26 | 27 | def deprecated_method(old_name, new_name) 28 | deprecated "`#{old_name}` is deprecated and it'll removed in next release, use `#{new_name}` instead", 3 29 | end 30 | 31 | extend self 32 | end 33 | end 34 | end 35 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/constants.rb: -------------------------------------------------------------------------------- 1 | module Concurrent 2 | 3 | # Various classes within allows for +nil+ values to be stored, 4 | # so a special +NULL+ token is required to indicate the "nil-ness". 
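# @example telling a stored +nil+ apart from a missing entry (illustrative sketch)
#   store = {}
#   store[:present] = nil
#   store.fetch(:present, Concurrent::NULL) #=> nil, a nil was explicitly stored
#   store.fetch(:missing, Concurrent::NULL) #=> Concurrent::NULL, nothing stored
#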
5 | # @!visibility private 6 | NULL = ::Object.new 7 | 8 | end 9 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/executor/immediate_executor.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/atomic/event' 2 | require 'concurrent/executor/abstract_executor_service' 3 | require 'concurrent/executor/serial_executor_service' 4 | 5 | module Concurrent 6 | 7 | # An executor service which runs all operations on the current thread, 8 | # blocking as necessary. Operations are performed in the order they are 9 | # received and no two operations can be performed simultaneously. 10 | # 11 | # This executor service exists mainly for testing and debugging. When used 12 | # it immediately runs every `#post` operation on the current thread, blocking 13 | # that thread until the operation is complete. This can be very beneficial 14 | # during testing because it makes all operations deterministic. 15 | # 16 | # @note Intended for use primarily in testing and debugging. 17 | class ImmediateExecutor < AbstractExecutorService 18 | include SerialExecutorService 19 | 20 | # Creates a new executor 21 | def initialize 22 | @stopped = Concurrent::Event.new 23 | end 24 | 25 | # @!macro executor_service_method_post 26 | def post(*args, &task) 27 | raise ArgumentError.new('no block given') unless block_given? 28 | return false unless running? 29 | task.call(*args) 30 | true 31 | end 32 | 33 | # @!macro executor_service_method_left_shift 34 | def <<(task) 35 | post(&task) 36 | self 37 | end 38 | 39 | # @!macro executor_service_method_running_question 40 | def running? 41 | ! shutdown? 42 | end 43 | 44 | # @!macro executor_service_method_shuttingdown_question 45 | def shuttingdown? 46 | false 47 | end 48 | 49 | # @!macro executor_service_method_shutdown_question 50 | def shutdown? 51 | @stopped.set? 52 | end 53 | 54 | # @!macro executor_service_method_shutdown 55 | def shutdown 56 | @stopped.set 57 | true 58 | end 59 | alias_method :kill, :shutdown 60 | 61 | # @!macro executor_service_method_wait_for_termination 62 | def wait_for_termination(timeout = nil) 63 | @stopped.wait(timeout) 64 | end 65 | end 66 | end 67 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/executor/indirect_immediate_executor.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/executor/immediate_executor' 2 | require 'concurrent/executor/simple_executor_service' 3 | 4 | module Concurrent 5 | # An executor service which runs all operations on a new thread, blocking 6 | # until it completes. Operations are performed in the order they are received 7 | # and no two operations can be performed simultaneously. 8 | # 9 | # This executor service exists mainly for testing and debugging. When used it 10 | # immediately runs every `#post` operation on a new thread, blocking the 11 | # current thread until the operation is complete. This is similar to how the 12 | # ImmediateExecutor works, but the operation has the full stack of the new 13 | # thread at its disposal. This can be helpful when the operations will spawn 14 | # more operations on the same executor and so on - such a situation might 15 | # overflow the single stack in case of an ImmediateExecutor, which is 16 | # inconsistent with how it would behave for a threaded executor. 17 | # 18 | # @note Intended for use primarily in testing and debugging.
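# @example posting a task (illustrative sketch, `do_work` is a placeholder)
#   executor = Concurrent::IndirectImmediateExecutor.new
#   executor.post { do_work } # the task runs on a fresh thread, while the
#                             # calling thread blocks until it completes
#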
19 | class IndirectImmediateExecutor < ImmediateExecutor 20 | # Creates a new executor 21 | def initialize 22 | super 23 | @internal_executor = SimpleExecutorService.new 24 | end 25 | 26 | # @!macro executor_service_method_post 27 | def post(*args, &task) 28 | raise ArgumentError.new("no block given") unless block_given? 29 | return false unless running? 30 | 31 | event = Concurrent::Event.new 32 | @internal_executor.post do 33 | begin 34 | task.call(*args) 35 | ensure 36 | event.set 37 | end 38 | end 39 | event.wait 40 | 41 | true 42 | end 43 | end 44 | end 45 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/executor/java_single_thread_executor.rb: -------------------------------------------------------------------------------- 1 | if Concurrent.on_jruby? 2 | 3 | require 'concurrent/executor/java_executor_service' 4 | require 'concurrent/executor/serial_executor_service' 5 | 6 | module Concurrent 7 | 8 | # @!macro single_thread_executor 9 | # @!macro abstract_executor_service_public_api 10 | # @!visibility private 11 | class JavaSingleThreadExecutor < JavaExecutorService 12 | include SerialExecutorService 13 | 14 | # @!macro single_thread_executor_method_initialize 15 | def initialize(opts = {}) 16 | super(opts) 17 | end 18 | 19 | private 20 | 21 | def ns_initialize(opts) 22 | @executor = java.util.concurrent.Executors.newSingleThreadExecutor( 23 | DaemonThreadFactory.new(ns_auto_terminate?) 24 | ) 25 | @fallback_policy = opts.fetch(:fallback_policy, :discard) 26 | raise ArgumentError.new("#{@fallback_policy} is not a valid fallback policy") unless FALLBACK_POLICY_CLASSES.keys.include?(@fallback_policy) 27 | end 28 | end 29 | end 30 | end 31 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/executor/ruby_executor_service.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/executor/abstract_executor_service' 2 | require 'concurrent/atomic/event' 3 | 4 | module Concurrent 5 | 6 | # @!macro abstract_executor_service_public_api 7 | # @!visibility private 8 | class RubyExecutorService < AbstractExecutorService 9 | safe_initialization! 10 | 11 | def initialize(*args, &block) 12 | super 13 | @StopEvent = Event.new 14 | @StoppedEvent = Event.new 15 | end 16 | 17 | def post(*args, &task) 18 | raise ArgumentError.new('no block given') unless block_given? 19 | deferred_action = synchronize { 20 | if running? 21 | ns_execute(*args, &task) 22 | else 23 | fallback_action(*args, &task) 24 | end 25 | } 26 | if deferred_action 27 | deferred_action.call 28 | else 29 | true 30 | end 31 | end 32 | 33 | def shutdown 34 | synchronize do 35 | break unless running? 36 | stop_event.set 37 | ns_shutdown_execution 38 | end 39 | true 40 | end 41 | 42 | def kill 43 | synchronize do 44 | break if shutdown? 45 | stop_event.set 46 | ns_kill_execution 47 | stopped_event.set 48 | end 49 | true 50 | end 51 | 52 | def wait_for_termination(timeout = nil) 53 | stopped_event.wait(timeout) 54 | end 55 | 56 | private 57 | 58 | def stop_event 59 | @StopEvent 60 | end 61 | 62 | def stopped_event 63 | @StoppedEvent 64 | end 65 | 66 | def ns_shutdown_execution 67 | stopped_event.set 68 | end 69 | 70 | def ns_running? 71 | !stop_event.set? 72 | end 73 | 74 | def ns_shuttingdown? 75 | !(ns_running? || ns_shutdown?) 76 | end 77 | 78 | def ns_shutdown? 79 | stopped_event.set? 
80 | end 81 | end 82 | end 83 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/executor/ruby_single_thread_executor.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/executor/ruby_thread_pool_executor' 2 | 3 | module Concurrent 4 | 5 | # @!macro single_thread_executor 6 | # @!macro abstract_executor_service_public_api 7 | # @!visibility private 8 | class RubySingleThreadExecutor < RubyThreadPoolExecutor 9 | 10 | # @!macro single_thread_executor_method_initialize 11 | def initialize(opts = {}) 12 | super( 13 | min_threads: 1, 14 | max_threads: 1, 15 | max_queue: 0, 16 | idletime: DEFAULT_THREAD_IDLETIMEOUT, 17 | fallback_policy: opts.fetch(:fallback_policy, :discard), 18 | ) 19 | end 20 | end 21 | end 22 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/executor/safe_task_executor.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/synchronization/lockable_object' 2 | 3 | module Concurrent 4 | 5 | # A simple utility class that executes a callable and returns an array of three elements: 6 | # success - indicating if the callable has been executed without errors 7 | # value - filled by the callable result if it has been executed without errors, nil otherwise 8 | # reason - the error raised by the callable if it has been executed with errors, nil otherwise 9 | class SafeTaskExecutor < Synchronization::LockableObject 10 | 11 | def initialize(task, opts = {}) 12 | @task = task 13 | @exception_class = opts.fetch(:rescue_exception, false) ? Exception : StandardError 14 | super() # ensures visibility 15 | end 16 | 17 | # @return [Array] 18 | def execute(*args) 19 | success = true 20 | value = reason = nil 21 | 22 | synchronize do 23 | begin 24 | value = @task.call(*args) 25 | success = true 26 | rescue @exception_class => ex 27 | reason = ex 28 | success = false 29 | end 30 | end 31 | 32 | [success, value, reason] 33 | end 34 | end 35 | end 36 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/executor/serial_executor_service.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/executor/executor_service' 2 | 3 | module Concurrent 4 | 5 | # Indicates that the including `ExecutorService` guarantees 6 | # that all operations will occur in the order they are posted and that no 7 | # two operations may occur simultaneously. This module provides no 8 | # functionality and provides no guarantees. That is the responsibility 9 | # of the including class. This module exists solely to allow the including 10 | # object to be interrogated for its serialization status. 11 | # 12 | # @example 13 | # class Foo 14 | # include Concurrent::SerialExecutor 15 | # end 16 | # 17 | # foo = Foo.new 18 | # 19 | # foo.is_a? Concurrent::ExecutorService #=> true 20 | # foo.is_a? Concurrent::SerialExecutor #=> true 21 | # foo.serialized? #=> true 22 | # 23 | # @!visibility private 24 | module SerialExecutorService 25 | include ExecutorService 26 | 27 | # @!macro executor_service_method_serialized_question 28 | # 29 | # @note Always returns `true` 30 | def serialized?
31 | true 32 | end 33 | end 34 | end 35 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/executor/serialized_execution_delegator.rb: -------------------------------------------------------------------------------- 1 | require 'delegate' 2 | require 'concurrent/executor/serial_executor_service' 3 | require 'concurrent/executor/serialized_execution' 4 | 5 | module Concurrent 6 | 7 | # A wrapper/delegator for any `ExecutorService` that 8 | # guarantees serialized execution of tasks. 9 | # 10 | # @see [SimpleDelegator](http://www.ruby-doc.org/stdlib-2.1.2/libdoc/delegate/rdoc/SimpleDelegator.html) 11 | # @see Concurrent::SerializedExecution 12 | class SerializedExecutionDelegator < SimpleDelegator 13 | include SerialExecutorService 14 | 15 | def initialize(executor) 16 | @executor = executor 17 | @serializer = SerializedExecution.new 18 | super(executor) 19 | end 20 | 21 | # @!macro executor_service_method_post 22 | def post(*args, &task) 23 | raise ArgumentError.new('no block given') unless block_given? 24 | return false unless running? 25 | @serializer.post(@executor, *args, &task) 26 | end 27 | end 28 | end 29 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/executors.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/executor/abstract_executor_service' 2 | require 'concurrent/executor/cached_thread_pool' 3 | require 'concurrent/executor/executor_service' 4 | require 'concurrent/executor/fixed_thread_pool' 5 | require 'concurrent/executor/immediate_executor' 6 | require 'concurrent/executor/indirect_immediate_executor' 7 | require 'concurrent/executor/java_executor_service' 8 | require 'concurrent/executor/java_single_thread_executor' 9 | require 'concurrent/executor/java_thread_pool_executor' 10 | require 'concurrent/executor/ruby_executor_service' 11 | require 'concurrent/executor/ruby_single_thread_executor' 12 | require 'concurrent/executor/ruby_thread_pool_executor' 13 | require 'concurrent/executor/cached_thread_pool' 14 | require 'concurrent/executor/safe_task_executor' 15 | require 'concurrent/executor/serial_executor_service' 16 | require 'concurrent/executor/serialized_execution' 17 | require 'concurrent/executor/serialized_execution_delegator' 18 | require 'concurrent/executor/single_thread_executor' 19 | require 'concurrent/executor/thread_pool_executor' 20 | require 'concurrent/executor/timer_set' 21 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/hash.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/utility/engine' 2 | require 'concurrent/thread_safe/util' 3 | 4 | module Concurrent 5 | 6 | # @!macro concurrent_hash 7 | # 8 | # A thread-safe subclass of Hash. This version locks against the object 9 | # itself for every method call, ensuring only one thread can be reading 10 | # or writing at a time. This includes iteration methods like `#each`, 11 | # which takes the lock repeatedly when reading an item. 12 | # 13 | # @see http://ruby-doc.org/core/Hash.html Ruby standard library `Hash` 14 | 15 | # @!macro internal_implementation_note 16 | HashImplementation = case 17 | when Concurrent.on_cruby? 
18 | # Hash is not fully thread-safe on CRuby, see 19 | # https://bugs.ruby-lang.org/issues/19237 20 | # https://github.com/ruby/ruby/commit/ffd52412ab 21 | # https://github.com/ruby-concurrency/concurrent-ruby/issues/929 22 | # So we will need to add synchronization here (similar to Concurrent::Map). 23 | ::Hash 24 | 25 | when Concurrent.on_jruby? 26 | require 'jruby/synchronized' 27 | 28 | class JRubyHash < ::Hash 29 | include JRuby::Synchronized 30 | end 31 | JRubyHash 32 | 33 | when Concurrent.on_truffleruby? 34 | require 'concurrent/thread_safe/util/data_structures' 35 | 36 | class TruffleRubyHash < ::Hash 37 | end 38 | 39 | ThreadSafe::Util.make_synchronized_on_truffleruby TruffleRubyHash 40 | TruffleRubyHash 41 | 42 | else 43 | warn 'Possibly unsupported Ruby implementation' 44 | ::Hash 45 | end 46 | private_constant :HashImplementation 47 | 48 | # @!macro concurrent_hash 49 | class Hash < HashImplementation 50 | end 51 | 52 | end 53 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/options.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/configuration' 2 | 3 | module Concurrent 4 | 5 | # @!visibility private 6 | module Options 7 | 8 | # Get the requested `Executor` based on the values set in the options hash. 9 | # 10 | # @param [Hash] opts the options defining the requested executor 11 | # @option opts [Executor] :executor when set use the given `Executor` instance. 12 | # Three special values are also supported: `:fast` returns the global fast executor, 13 | # `:io` returns the global io executor, and `:immediate` returns a new 14 | # `ImmediateExecutor` object. 15 | # 16 | # @return [Executor, nil] the requested thread pool, or nil when no option specified 17 | # 18 | # @!visibility private 19 | def self.executor_from_options(opts = {}) # :nodoc: 20 | if identifier = opts.fetch(:executor, nil) 21 | executor(identifier) 22 | else 23 | nil 24 | end 25 | end 26 | 27 | def self.executor(executor_identifier) 28 | case executor_identifier 29 | when :fast 30 | Concurrent.global_fast_executor 31 | when :io 32 | Concurrent.global_io_executor 33 | when :immediate 34 | Concurrent.global_immediate_executor 35 | when Concurrent::ExecutorService 36 | executor_identifier 37 | else 38 | raise ArgumentError, "executor not recognized by '#{executor_identifier}'" 39 | end 40 | end 41 | end 42 | end 43 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/re_include.rb: -------------------------------------------------------------------------------- 1 | module Concurrent 2 | 3 | # Methods form module A included to a module B, which is already included into class C, 4 | # will not be visible in the C class. If this module is extended to B then A's methods 5 | # are correctly made visible to C. 6 | # 7 | # @example 8 | # module A 9 | # def a 10 | # :a 11 | # end 12 | # end 13 | # 14 | # module B1 15 | # end 16 | # 17 | # class C1 18 | # include B1 19 | # end 20 | # 21 | # module B2 22 | # extend Concurrent::ReInclude 23 | # end 24 | # 25 | # class C2 26 | # include B2 27 | # end 28 | # 29 | # B1.send :include, A 30 | # B2.send :include, A 31 | # 32 | # C1.new.respond_to? :a # => false 33 | # C2.new.respond_to? 
:a # => true 34 | # 35 | # @!visibility private 36 | module ReInclude 37 | # @!visibility private 38 | def included(base) 39 | (@re_include_to_bases ||= []) << [:include, base] 40 | super(base) 41 | end 42 | 43 | # @!visibility private 44 | def extended(base) 45 | (@re_include_to_bases ||= []) << [:extend, base] 46 | super(base) 47 | end 48 | 49 | # @!visibility private 50 | def include(*modules) 51 | result = super(*modules) 52 | modules.reverse.each do |module_being_included| 53 | (@re_include_to_bases ||= []).each do |method, mod| 54 | mod.send method, module_being_included 55 | end 56 | end 57 | result 58 | end 59 | end 60 | end 61 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/synchronization.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/utility/native_extension_loader' # load native parts first 2 | 3 | require 'concurrent/synchronization/object' 4 | require 'concurrent/synchronization/lockable_object' 5 | require 'concurrent/synchronization/condition' 6 | require 'concurrent/synchronization/lock' 7 | 8 | module Concurrent 9 | # @!visibility private 10 | module Synchronization 11 | end 12 | end 13 | 14 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/synchronization/abstract_object.rb: -------------------------------------------------------------------------------- 1 | module Concurrent 2 | module Synchronization 3 | 4 | # @!visibility private 5 | # @!macro internal_implementation_note 6 | class AbstractObject 7 | def initialize 8 | # nothing to do 9 | end 10 | 11 | # @!visibility private 12 | # @abstract 13 | def full_memory_barrier 14 | raise NotImplementedError 15 | end 16 | 17 | def self.attr_volatile(*names) 18 | raise NotImplementedError 19 | end 20 | end 21 | end 22 | end 23 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/synchronization/condition.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/synchronization/lockable_object' 2 | 3 | module Concurrent 4 | module Synchronization 5 | 6 | # @!visibility private 7 | # TODO (pitr-ch 04-Dec-2016): should be in edge 8 | class Condition < LockableObject 9 | safe_initialization! 
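# Conditions are obtained via LockableObject#new_condition (defined at the
# bottom of this file); #wait, #signal and #broadcast first synchronize on
# the owning lock and then on the condition itself before delegating to the
# inherited ns_* primitives.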
10 | 11 | # TODO (pitr 12-Sep-2015): locks two objects, improve 12 | # TODO (pitr 26-Sep-2015): study 13 | # http://grepcode.com/file/repository.grepcode.com/java/root/jdk/openjdk/8-b132/java/util/concurrent/locks/AbstractQueuedSynchronizer.java#AbstractQueuedSynchronizer.Node 14 | 15 | singleton_class.send :alias_method, :private_new, :new 16 | private_class_method :new 17 | 18 | def initialize(lock) 19 | super() 20 | @Lock = lock 21 | end 22 | 23 | def wait(timeout = nil) 24 | @Lock.synchronize { ns_wait(timeout) } 25 | end 26 | 27 | def ns_wait(timeout = nil) 28 | synchronize { super(timeout) } 29 | end 30 | 31 | def wait_until(timeout = nil, &condition) 32 | @Lock.synchronize { ns_wait_until(timeout, &condition) } 33 | end 34 | 35 | def ns_wait_until(timeout = nil, &condition) 36 | synchronize { super(timeout, &condition) } 37 | end 38 | 39 | def signal 40 | @Lock.synchronize { ns_signal } 41 | end 42 | 43 | def ns_signal 44 | synchronize { super } 45 | end 46 | 47 | def broadcast 48 | @Lock.synchronize { ns_broadcast } 49 | end 50 | 51 | def ns_broadcast 52 | synchronize { super } 53 | end 54 | end 55 | 56 | class LockableObject < LockableObjectImplementation 57 | def new_condition 58 | Condition.private_new(self) 59 | end 60 | end 61 | end 62 | end 63 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/synchronization/full_memory_barrier.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/utility/native_extension_loader' # load native parts first 2 | 3 | module Concurrent 4 | module Synchronization 5 | case 6 | when Concurrent.on_cruby? 7 | def self.full_memory_barrier 8 | # relying on undocumented behavior of CRuby, GVL acquire has lock which ensures visibility of ivars 9 | # https://github.com/ruby/ruby/blob/ruby_2_2/thread_pthread.c#L204-L211 10 | end 11 | 12 | when Concurrent.on_jruby? 13 | require 'concurrent/utility/native_extension_loader' 14 | def self.full_memory_barrier 15 | JRubyAttrVolatile.full_memory_barrier 16 | end 17 | 18 | when Concurrent.on_truffleruby? 19 | def self.full_memory_barrier 20 | TruffleRuby.full_memory_barrier 21 | end 22 | 23 | else 24 | warn 'Possibly unsupported Ruby implementation' 25 | def self.full_memory_barrier 26 | end 27 | end 28 | end 29 | end 30 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/synchronization/jruby_lockable_object.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/utility/native_extension_loader' # load native parts first 2 | 3 | module Concurrent 4 | module Synchronization 5 | 6 | if Concurrent.on_jruby? 
7 | 8 | # @!visibility private 9 | # @!macro internal_implementation_note 10 | class JRubyLockableObject < AbstractLockableObject 11 | 12 | end 13 | end 14 | end 15 | end 16 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/synchronization/lock.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/synchronization/lockable_object' 2 | 3 | module Concurrent 4 | module Synchronization 5 | 6 | # @!visibility private 7 | # TODO (pitr-ch 04-Dec-2016): should be in edge 8 | class Lock < LockableObject 9 | # TODO use JavaReentrantLock on JRuby 10 | 11 | public :synchronize 12 | 13 | def wait(timeout = nil) 14 | synchronize { ns_wait(timeout) } 15 | end 16 | 17 | public :ns_wait 18 | 19 | def wait_until(timeout = nil, &condition) 20 | synchronize { ns_wait_until(timeout, &condition) } 21 | end 22 | 23 | public :ns_wait_until 24 | 25 | def signal 26 | synchronize { ns_signal } 27 | end 28 | 29 | public :ns_signal 30 | 31 | def broadcast 32 | synchronize { ns_broadcast } 33 | end 34 | 35 | public :ns_broadcast 36 | end 37 | end 38 | end 39 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/synchronization/safe_initialization.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/synchronization/full_memory_barrier' 2 | 3 | module Concurrent 4 | module Synchronization 5 | 6 | # @!visibility private 7 | # @!macro internal_implementation_note 8 | # 9 | # By extending this module, a class and all its children are marked to be constructed safely. Meaning that 10 | # all writes (ivar initializations) are made visible to all readers of newly constructed object. It ensures 11 | # same behaviour as Java's final fields. 12 | # 13 | # Due to using Kernel#extend, the module is not included again if already present in the ancestors, 14 | # which avoids extra overhead. 15 | # 16 | # @example 17 | # class AClass < Concurrent::Synchronization::Object 18 | # extend Concurrent::Synchronization::SafeInitialization 19 | # 20 | # def initialize 21 | # @AFinalValue = 'value' # published safely, #foo will never return nil 22 | # end 23 | # 24 | # def foo 25 | # @AFinalValue 26 | # end 27 | # end 28 | module SafeInitialization 29 | def new(*args, &block) 30 | super(*args, &block) 31 | ensure 32 | Concurrent::Synchronization.full_memory_barrier 33 | end 34 | end 35 | end 36 | end 37 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/thread_safe/readme.txt: -------------------------------------------------------------------------------- 1 | // TODO this directory should be removed over time - remnant of thread_safe merge -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/thread_safe/synchronized_delegator.rb: -------------------------------------------------------------------------------- 1 | require 'delegate' 2 | require 'monitor' 3 | 4 | module Concurrent 5 | # This class provides a trivial way to synchronize all calls to a given object 6 | # by wrapping it with a `Delegator` that performs `Monitor#enter/exit` calls 7 | # around the delegated `#send`. 
Example: 8 | # 9 | # array = [] # not thread-safe on many impls 10 | # array = SynchronizedDelegator.new([]) # thread-safe 11 | # 12 | # A simple `Monitor` provides a very coarse-grained way to synchronize a given 13 | # object, in that it will cause synchronization for methods that have no need 14 | # for it, but this is a trivial way to get thread-safety where none may exist 15 | # currently on some implementations. 16 | # 17 | # This class is currently being considered for inclusion into stdlib, via 18 | # https://bugs.ruby-lang.org/issues/8556 19 | # 20 | # @!visibility private 21 | class SynchronizedDelegator < SimpleDelegator 22 | def setup 23 | @old_abort = Thread.abort_on_exception 24 | Thread.abort_on_exception = true 25 | end 26 | 27 | def teardown 28 | Thread.abort_on_exception = @old_abort 29 | end 30 | 31 | def initialize(obj) 32 | __setobj__(obj) 33 | @monitor = Monitor.new 34 | end 35 | 36 | def method_missing(method, *args, &block) 37 | monitor = @monitor 38 | begin 39 | monitor.enter 40 | super 41 | ensure 42 | monitor.exit 43 | end 44 | end 45 | 46 | end 47 | end 48 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/thread_safe/util.rb: -------------------------------------------------------------------------------- 1 | module Concurrent 2 | 3 | # @!visibility private 4 | module ThreadSafe 5 | 6 | # @!visibility private 7 | module Util 8 | 9 | # TODO (pitr-ch 15-Oct-2016): migrate to Utility::NativeInteger 10 | FIXNUM_BIT_SIZE = (0.size * 8) - 2 11 | MAX_INT = (2 ** FIXNUM_BIT_SIZE) - 1 12 | # TODO (pitr-ch 15-Oct-2016): migrate to Utility::ProcessorCounter 13 | CPU_COUNT = 16 # is there a way to determine this? 14 | end 15 | end 16 | end 17 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/thread_safe/util/data_structures.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/thread_safe/util' 2 | require 'concurrent/utility/engine' 3 | 4 | # Shim for TruffleRuby.synchronized 5 | if Concurrent.on_truffleruby? && !TruffleRuby.respond_to?(:synchronized) 6 | module TruffleRuby 7 | def self.synchronized(object, &block) 8 | Truffle::System.synchronized(object, &block) 9 | end 10 | end 11 | end 12 | 13 | module Concurrent 14 | module ThreadSafe 15 | module Util 16 | def self.make_synchronized_on_cruby(klass) 17 | klass.class_eval do 18 | def initialize(*args, &block) 19 | @_monitor = Monitor.new 20 | super 21 | end 22 | 23 | def initialize_copy(other) 24 | # make sure a copy is not sharing a monitor with the original object! 25 | @_monitor = Monitor.new 26 | super 27 | end 28 | end 29 | 30 | klass.superclass.instance_methods(false).each do |method| 31 | klass.class_eval <<-RUBY, __FILE__, __LINE__ + 1 32 | def #{method}(*args) 33 | monitor = @_monitor 34 | monitor or raise("BUG: Internal monitor was not properly initialized. 
Please report this to the concurrent-ruby developers.") 35 | monitor.synchronize { super } 36 | end 37 | RUBY 38 | end 39 | end 40 | 41 | def self.make_synchronized_on_truffleruby(klass) 42 | klass.superclass.instance_methods(false).each do |method| 43 | klass.class_eval <<-RUBY, __FILE__, __LINE__ + 1 44 | def #{method}(*args, &block) 45 | TruffleRuby.synchronized(self) { super(*args, &block) } 46 | end 47 | RUBY 48 | end 49 | end 50 | end 51 | end 52 | end 53 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/thread_safe/util/power_of_two_tuple.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/thread_safe/util' 2 | require 'concurrent/tuple' 3 | 4 | module Concurrent 5 | 6 | # @!visibility private 7 | module ThreadSafe 8 | 9 | # @!visibility private 10 | module Util 11 | 12 | # @!visibility private 13 | class PowerOfTwoTuple < Concurrent::Tuple 14 | 15 | def initialize(size) 16 | raise ArgumentError, "size must be a power of 2 (#{size.inspect} provided)" unless size > 0 && size & (size - 1) == 0 17 | super(size) 18 | end 19 | 20 | def hash_to_index(hash) 21 | (size - 1) & hash 22 | end 23 | 24 | def volatile_get_by_hash(hash) 25 | volatile_get(hash_to_index(hash)) 26 | end 27 | 28 | def volatile_set_by_hash(hash, value) 29 | volatile_set(hash_to_index(hash), value) 30 | end 31 | 32 | def next_in_size_table 33 | self.class.new(size << 1) 34 | end 35 | end 36 | end 37 | end 38 | end 39 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/thread_safe/util/xor_shift_random.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/thread_safe/util' 2 | 3 | module Concurrent 4 | 5 | # @!visibility private 6 | module ThreadSafe 7 | 8 | # @!visibility private 9 | module Util 10 | 11 | # A xorshift random number (positive +Fixnum+s) generator, provides 12 | # reasonably cheap way to generate thread local random numbers without 13 | # contending for the global +Kernel.rand+. 14 | # 15 | # Usage: 16 | # x = XorShiftRandom.get # uses Kernel.rand to generate an initial seed 17 | # while true 18 | # if (x = XorShiftRandom.xorshift).odd? # thread-locally generate a next random number 19 | # do_something_at_random 20 | # end 21 | # end 22 | module XorShiftRandom 23 | extend self 24 | MAX_XOR_SHIFTABLE_INT = MAX_INT - 1 25 | 26 | # Generates an initial non-zero positive +Fixnum+ via +Kernel.rand+. 
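# (Editor's sketch, illustrative only.) +xorshift+ below takes the previous value as
# its argument and returns the next one, so callers thread the value through explicitly:
#
#   x = XorShiftRandom.get           # seed once per thread
#   5.times do
#     x = XorShiftRandom.xorshift(x) # next cheap, thread-local pseudo-random positive integer
#     puts x
#   end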
27 | def get 28 | Kernel.rand(MAX_XOR_SHIFTABLE_INT) + 1 # 0 can't be xorshifted 29 | end 30 | 31 | # xorshift based on: http://www.jstatsoft.org/v08/i14/paper 32 | if 0.size == 4 33 | # using the "yˆ=y>>a; yˆ=y<>c;" transform with the (a,b,c) tuple with values (3,1,14) to minimise Bignum overflows 34 | def xorshift(x) 35 | x ^= x >> 3 36 | x ^= (x << 1) & MAX_INT # cut-off Bignum overflow 37 | x ^= x >> 14 38 | end 39 | else 40 | # using the "yˆ=y>>a; yˆ=y<>c;" transform with the (a,b,c) tuple with values (1,1,54) to minimise Bignum overflows 41 | def xorshift(x) 42 | x ^= x >> 1 43 | x ^= (x << 1) & MAX_INT # cut-off Bignum overflow 44 | x ^= x >> 54 45 | end 46 | end 47 | end 48 | end 49 | end 50 | end 51 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/utility/engine.rb: -------------------------------------------------------------------------------- 1 | module Concurrent 2 | # @!visibility private 3 | module Utility 4 | 5 | # @!visibility private 6 | module EngineDetector 7 | def on_cruby? 8 | RUBY_ENGINE == 'ruby' 9 | end 10 | 11 | def on_jruby? 12 | RUBY_ENGINE == 'jruby' 13 | end 14 | 15 | def on_truffleruby? 16 | RUBY_ENGINE == 'truffleruby' 17 | end 18 | 19 | def on_windows? 20 | !(RbConfig::CONFIG['host_os'] =~ /mswin|mingw|cygwin/).nil? 21 | end 22 | 23 | def on_osx? 24 | !(RbConfig::CONFIG['host_os'] =~ /darwin|mac os/).nil? 25 | end 26 | 27 | def on_linux? 28 | !(RbConfig::CONFIG['host_os'] =~ /linux/).nil? 29 | end 30 | 31 | def ruby_version(version = RUBY_VERSION, comparison, major, minor, patch) 32 | result = (version.split('.').map(&:to_i) <=> [major, minor, patch]) 33 | comparisons = { :== => [0], 34 | :>= => [1, 0], 35 | :<= => [-1, 0], 36 | :> => [1], 37 | :< => [-1] } 38 | comparisons.fetch(comparison).include? result 39 | end 40 | end 41 | end 42 | 43 | # @!visibility private 44 | extend Utility::EngineDetector 45 | end 46 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/utility/monotonic_time.rb: -------------------------------------------------------------------------------- 1 | module Concurrent 2 | 3 | # @!macro monotonic_get_time 4 | # 5 | # Returns the current time as tracked by the application monotonic clock. 6 | # 7 | # @param [Symbol] unit the time unit to be returned, can be either 8 | # :float_second, :float_millisecond, :float_microsecond, :second, 9 | # :millisecond, :microsecond, or :nanosecond default to :float_second. 
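  #   A brief usage sketch (editor's addition; the values shown are illustrative, and the
  #   Float/Integer distinction follows Process.clock_gettime, which this method wraps):
  #
  #     Concurrent.monotonic_time                #=> 485027.746332  (Float, seconds)
  #     Concurrent.monotonic_time(:millisecond)  #=> 485027746      (Integer)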
10 | # 11 | # @return [Float] The current monotonic time since some unspecified 12 | # starting point 13 | # 14 | # @!macro monotonic_clock_warning 15 | def monotonic_time(unit = :float_second) 16 | Process.clock_gettime(Process::CLOCK_MONOTONIC, unit) 17 | end 18 | module_function :monotonic_time 19 | end 20 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/utility/native_integer.rb: -------------------------------------------------------------------------------- 1 | module Concurrent 2 | # @!visibility private 3 | module Utility 4 | # @private 5 | module NativeInteger 6 | # http://stackoverflow.com/questions/535721/ruby-max-integer 7 | MIN_VALUE = -(2**(0.size * 8 - 2)) 8 | MAX_VALUE = (2**(0.size * 8 - 2) - 1) 9 | 10 | def ensure_upper_bound(value) 11 | if value > MAX_VALUE 12 | raise RangeError.new("#{value} is greater than the maximum value of #{MAX_VALUE}") 13 | end 14 | value 15 | end 16 | 17 | def ensure_lower_bound(value) 18 | if value < MIN_VALUE 19 | raise RangeError.new("#{value} is less than the maximum value of #{MIN_VALUE}") 20 | end 21 | value 22 | end 23 | 24 | def ensure_integer(value) 25 | unless value.is_a?(Integer) 26 | raise ArgumentError.new("#{value} is not an Integer") 27 | end 28 | value 29 | end 30 | 31 | def ensure_integer_and_bounds(value) 32 | ensure_integer value 33 | ensure_upper_bound value 34 | ensure_lower_bound value 35 | end 36 | 37 | def ensure_positive(value) 38 | if value < 0 39 | raise ArgumentError.new("#{value} cannot be negative") 40 | end 41 | value 42 | end 43 | 44 | def ensure_positive_and_no_zero(value) 45 | if value < 1 46 | raise ArgumentError.new("#{value} cannot be negative or zero") 47 | end 48 | value 49 | end 50 | 51 | extend self 52 | end 53 | end 54 | end 55 | -------------------------------------------------------------------------------- /lib/concurrent-ruby/concurrent/version.rb: -------------------------------------------------------------------------------- 1 | module Concurrent 2 | VERSION = '1.3.5' 3 | end 4 | -------------------------------------------------------------------------------- /spec/.gitignore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ruby-concurrency/concurrent-ruby/c8f0bae98f1233dde681441b85dbc29a869267bd/spec/.gitignore -------------------------------------------------------------------------------- /spec/concurrent/.gitignore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ruby-concurrency/concurrent-ruby/c8f0bae98f1233dde681441b85dbc29a869267bd/spec/concurrent/.gitignore -------------------------------------------------------------------------------- /spec/concurrent/atomic/lock_local_var_spec.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/atomic/lock_local_var' 2 | 3 | module Concurrent 4 | 5 | RSpec.describe LockLocalVar do 6 | mutex = Mutex.new 7 | mutex_owned_per_thread = mutex.synchronize do 8 | Fiber.new { mutex.owned? 
}.resume 9 | end 10 | 11 | it "uses FiberLocalVar if Mutex is per Fiber", if: !mutex_owned_per_thread do 12 | expect(LockLocalVar).to be(FiberLocalVar) 13 | end 14 | 15 | it "uses ThreadLocalVar if Mutex is per Thread", if: mutex_owned_per_thread do 16 | expect(LockLocalVar).to be(ThreadLocalVar) 17 | end 18 | end 19 | 20 | end 21 | -------------------------------------------------------------------------------- /spec/concurrent/channel/buffer/base_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative 'buffered_shared' 2 | require 'concurrent/channel/buffer/base' 3 | 4 | module Concurrent::Channel::Buffer 5 | 6 | RSpec.describe Base, edge: true do 7 | 8 | subject { described_class.new } 9 | 10 | specify do 11 | expect(subject.capacity).to eq 0 12 | end 13 | 14 | specify do 15 | expect(subject).to be_blocking 16 | end 17 | 18 | specify do 19 | expect { 20 | subject.size 21 | }.to raise_error(NotImplementedError) 22 | end 23 | 24 | specify do 25 | expect { 26 | subject.empty? 27 | }.to raise_error(NotImplementedError) 28 | end 29 | 30 | specify do 31 | expect { 32 | subject.full? 33 | }.to raise_error(NotImplementedError) 34 | end 35 | 36 | specify do 37 | expect { 38 | subject.put(42) 39 | }.to raise_error(NotImplementedError) 40 | end 41 | 42 | specify do 43 | expect { 44 | subject.offer(42) 45 | }.to raise_error(NotImplementedError) 46 | end 47 | 48 | specify do 49 | expect { 50 | subject.take 51 | }.to raise_error(NotImplementedError) 52 | end 53 | 54 | specify do 55 | expect { 56 | subject.poll 57 | }.to raise_error(NotImplementedError) 58 | end 59 | 60 | specify do 61 | expect { 62 | subject.next 63 | }.to raise_error(NotImplementedError) 64 | end 65 | 66 | specify do 67 | expect(subject).to_not be_closed 68 | end 69 | 70 | specify do 71 | subject.close 72 | expect(subject).to be_closed 73 | end 74 | end 75 | end 76 | -------------------------------------------------------------------------------- /spec/concurrent/channel/buffer/buffered_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative 'buffered_shared' 2 | 3 | module Concurrent::Channel::Buffer 4 | 5 | RSpec.describe Buffered, edge: true do 6 | 7 | let(:capacity) { 10 } 8 | subject { described_class.new(capacity) } 9 | 10 | it_behaves_like :channel_buffered_buffer 11 | 12 | specify do 13 | expect(subject).to be_blocking 14 | end 15 | 16 | context '#full?' 
do 17 | it 'returns true when at max capacity' do 18 | subject = described_class.new(1) 19 | subject.put(:foo) 20 | expect(subject).to be_full 21 | end 22 | end 23 | 24 | context '#put' do 25 | it 'blocks when at capacity until a thread is ready to take' do 26 | subject = described_class.new(1) 27 | subject.put(13) 28 | bucket = Concurrent::AtomicReference.new(nil) 29 | t = in_thread do 30 | subject.put(42) 31 | bucket.value = 42 32 | end 33 | 34 | t.join(0.1) 35 | 36 | before = bucket.value 37 | subject.take 38 | t.join(0.1) 39 | after = bucket.value 40 | 41 | expect(before).to be nil 42 | expect(after).to eq 42 43 | expect(t.status).to be false 44 | end 45 | end 46 | 47 | context '#offer' do 48 | it 'returns false immediately when full' do 49 | subject = described_class.new(1) 50 | subject.put(:foo) 51 | expect(subject.offer(:bar)).to be false 52 | end 53 | end 54 | end 55 | end 56 | -------------------------------------------------------------------------------- /spec/concurrent/channel/buffer/dropping_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative 'buffered_shared' 2 | require 'concurrent/channel/buffer/dropping' 3 | 4 | module Concurrent::Channel::Buffer 5 | 6 | RSpec.describe Dropping, edge: true do 7 | 8 | subject { described_class.new(10) } 9 | 10 | it_behaves_like :channel_buffered_buffer 11 | 12 | specify do 13 | expect(subject).to_not be_blocking 14 | end 15 | 16 | context '#put' do 17 | 18 | it 'does not block when full' do 19 | subject = described_class.new(1) 20 | 3.times {|i| expect(subject.put(i)).to be true } 21 | end 22 | 23 | it 'drops the last value when full' do 24 | subject = described_class.new(1) 25 | 3.times{|i| subject.put(i)} 26 | internal_buffer = subject.instance_variable_get(:@buffer) 27 | expect(internal_buffer.size).to eq 1 28 | expect(internal_buffer.first).to eq 0 29 | end 30 | end 31 | 32 | context '#offer' do 33 | 34 | it 'returns true immediately when full' do 35 | subject = described_class.new(1) 36 | subject.put(:foo) 37 | expect(subject.offer(:bar)).to be true 38 | end 39 | 40 | it 'drops the last value when full' do 41 | subject = described_class.new(1) 42 | 3.times{|i| subject.offer(i)} 43 | internal_buffer = subject.instance_variable_get(:@buffer) 44 | expect(internal_buffer.size).to eq 1 45 | expect(internal_buffer.first).to eq 0 46 | end 47 | end 48 | end 49 | end 50 | -------------------------------------------------------------------------------- /spec/concurrent/channel/buffer/sliding_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative 'buffered_shared' 2 | require 'concurrent/channel/buffer/sliding' 3 | 4 | module Concurrent::Channel::Buffer 5 | 6 | RSpec.describe Sliding, edge: true do 7 | 8 | subject { described_class.new(10) } 9 | 10 | it_behaves_like :channel_buffered_buffer 11 | 12 | specify do 13 | expect(subject).to_not be_blocking 14 | end 15 | 16 | context '#put' do 17 | 18 | it 'does not block when full' do 19 | subject = described_class.new(1) 20 | 3.times {|i| expect(subject.put(i)).to be true } 21 | end 22 | 23 | it 'drops the first value when full' do 24 | subject = described_class.new(1) 25 | 3.times{|i| subject.put(i)} 26 | internal_buffer = subject.instance_variable_get(:@buffer) 27 | expect(internal_buffer.size).to eq 1 28 | expect(internal_buffer.first).to eq 2 29 | end 30 | end 31 | 32 | context '#offer' do 33 | 34 | it 'returns true immediately when full' do 35 | subject = described_class.new(1) 36 | 
subject.put(:foo) 37 | expect(subject.offer(:bar)).to be true 38 | end 39 | 40 | it 'drops the first value when full' do 41 | subject = described_class.new(1) 42 | 3.times{|i| subject.offer(i)} 43 | internal_buffer = subject.instance_variable_get(:@buffer) 44 | expect(internal_buffer.size).to eq 1 45 | expect(internal_buffer.first).to eq 2 46 | end 47 | end 48 | end 49 | end 50 | -------------------------------------------------------------------------------- /spec/concurrent/channel/buffer/ticker_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative 'timing_buffer_shared' 2 | require 'concurrent/channel/buffer/ticker' 3 | 4 | module Concurrent::Channel::Buffer 5 | 6 | RSpec.describe Ticker, edge: true do 7 | 8 | let(:delay) { 0.1 } 9 | subject { described_class.new(delay) } 10 | 11 | it_behaves_like :channel_timing_buffer 12 | 13 | context '#take' do 14 | it 'triggers until closed' do 15 | expected = 3 16 | actual = 0 17 | expected.times { actual += 1 if subject.take.is_a? Concurrent::Channel::Tick } 18 | expect(actual).to eq expected 19 | end 20 | 21 | it 'returns Concurrent::NULL when closed after trigger' do 22 | subject.take 23 | subject.close 24 | expect(subject).to be_closed 25 | expect(subject.take).to eq Concurrent::NULL 26 | end 27 | end 28 | 29 | context '#poll' do 30 | it 'triggers until closed' do 31 | expected = 3 32 | actual = 0 33 | expected.times do 34 | until subject.poll.is_a?(Concurrent::Channel::Tick) 35 | actual += 1 36 | end 37 | end 38 | end 39 | end 40 | 41 | context '#next' do 42 | it 'triggers until closed' do 43 | expected = 3 44 | actual = 0 45 | expected.times { actual += 1 if subject.next.first.is_a? Concurrent::Channel::Tick } 46 | expect(actual).to eq expected 47 | end 48 | 49 | it 'returns true for more while open' do 50 | _, more = subject.next 51 | expect(more).to be true 52 | end 53 | 54 | it 'returns false for more once closed' do 55 | subject.close 56 | _, more = subject.next 57 | expect(more).to be false 58 | end 59 | end 60 | end 61 | end 62 | -------------------------------------------------------------------------------- /spec/concurrent/channel/buffer/timer_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative 'timing_buffer_shared' 2 | require 'concurrent/channel/buffer/timer' 3 | require 'concurrent/atomic/atomic_boolean' 4 | 5 | module Concurrent::Channel::Buffer 6 | 7 | RSpec.describe Timer, edge: true do 8 | 9 | let(:delay) { 0.1 } 10 | subject { described_class.new(0.1) } 11 | 12 | it_behaves_like :channel_timing_buffer 13 | 14 | context '#take' do 15 | it 'closes automatically on first take' do 16 | expect(subject.take).to be_truthy 17 | expect(subject).to be_closed 18 | end 19 | end 20 | 21 | context '#poll' do 22 | it 'closes automatically on first take' do 23 | loop do 24 | break if subject.poll != Concurrent::NULL 25 | end 26 | expect(subject).to be_closed 27 | end 28 | end 29 | 30 | context '#next' do 31 | it 'closes automatically on first take' do 32 | loop do 33 | value, _ = subject.next 34 | break if value != Concurrent::NULL 35 | end 36 | expect(subject).to be_closed 37 | end 38 | 39 | it 'returns false for more' do 40 | _, more = subject.next 41 | expect(more).to be false 42 | end 43 | end 44 | end 45 | end 46 | -------------------------------------------------------------------------------- /spec/concurrent/collection/copy_on_notify_observer_set_spec.rb: 
-------------------------------------------------------------------------------- 1 | require 'concurrent/collection/copy_on_notify_observer_set' 2 | require_relative 'observer_set_shared' 3 | 4 | module Concurrent 5 | module Collection 6 | RSpec.describe CopyOnNotifyObserverSet do 7 | it_behaves_like 'an observer set' 8 | end 9 | end 10 | end 11 | -------------------------------------------------------------------------------- /spec/concurrent/collection/copy_on_write_observer_set_spec.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/collection/copy_on_write_observer_set' 2 | require_relative 'observer_set_shared' 3 | 4 | module Concurrent 5 | module Collection 6 | 7 | RSpec.describe CopyOnWriteObserverSet do 8 | it_behaves_like 'an observer set' 9 | end 10 | end 11 | end 12 | -------------------------------------------------------------------------------- /spec/concurrent/collection_each_shared.rb: -------------------------------------------------------------------------------- 1 | RSpec.shared_examples :collection_each do 2 | 3 | it 'common' do 4 | @cache.send(method) { |k, v| fail } 5 | expect(@cache).to eq @cache.send(method) {} 6 | @cache[:a] = 1 7 | 8 | h = {} 9 | @cache.send(method) { |k, v| h[k] = v } 10 | expect({:a => 1}).to eq h 11 | 12 | @cache[:b] = 2 13 | h = {} 14 | @cache.send(method) { |k, v| h[k] = v } 15 | expect({:a => 1, :b => 2}).to eq h 16 | end 17 | 18 | it 'pair iterator' do 19 | @cache[:a] = 1 20 | @cache[:b] = 2 21 | i = 0 22 | r = @cache.send(method) do |k, v| 23 | if i == 0 24 | i += 1 25 | next 26 | elsif i == 1 27 | break :breaked 28 | end 29 | end 30 | 31 | expect(:breaked).to eq r 32 | end 33 | 34 | it 'allows modification' do 35 | @cache[:a] = 1 36 | @cache[:b] = 1 37 | @cache[:c] = 1 38 | 39 | expect_size_change(1) do 40 | @cache.send(method) do |k, v| 41 | @cache[:z] = 1 42 | end 43 | end 44 | end 45 | 46 | context 'when no block is given' do 47 | it 'returns an enumerator' do 48 | @cache[:a] = 1 49 | @cache[:b] = 2 50 | 51 | expect(@cache.send(method)).to be_a Enumerator 52 | end 53 | 54 | it 'returns an object which is enumerable' do 55 | @cache[:a] = 1 56 | @cache[:b] = 2 57 | 58 | expect(@cache.send(method).to_a).to contain_exactly([:a, 1], [:b, 2]) 59 | end 60 | end 61 | end 62 | -------------------------------------------------------------------------------- /spec/concurrent/concern/observable_spec.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/concern/observable' 2 | 3 | module Concurrent 4 | module Concern 5 | 6 | RSpec.describe Observable do 7 | 8 | let (:described_class) do 9 | Class.new do 10 | include Observable 11 | public :observers, :observers= 12 | end 13 | end 14 | 15 | let(:observer_set) { double(:observer_set) } 16 | subject { described_class.new } 17 | 18 | before(:each) do 19 | subject.observers = observer_set 20 | end 21 | 22 | it 'does not initialize set by by default' do 23 | expect(described_class.new.observers).to be_nil 24 | end 25 | 26 | it 'uses the given observer set' do 27 | expected = Collection::CopyOnWriteObserverSet.new 28 | subject.observers = expected 29 | expect(subject.observers).to eql expected 30 | end 31 | 32 | it 'delegates #add_observer' do 33 | expect(observer_set).to receive(:add_observer).with(:observer, :update) { |v| v } 34 | expect(subject.add_observer(:observer)).to eq :observer 35 | end 36 | 37 | it 'delegates #with_observer' do 38 | expect(observer_set).to 
receive(:add_observer).with(:observer, :update) { |v| v } 39 | expect(subject.with_observer(:observer)).to eq subject 40 | end 41 | 42 | it 'delegates #delete_observer' do 43 | expect(observer_set).to receive(:delete_observer).with(:observer) 44 | subject.delete_observer(:observer) 45 | end 46 | 47 | it 'delegates #delete_observers' do 48 | expect(observer_set).to receive(:delete_observers).with(no_args) 49 | subject.delete_observers 50 | end 51 | 52 | it 'delegates #count_observers' do 53 | expect(observer_set).to receive(:count_observers).with(no_args) 54 | subject.count_observers 55 | end 56 | end 57 | end 58 | end 59 | -------------------------------------------------------------------------------- /spec/concurrent/configuration_spec.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/configuration' 2 | 3 | module Concurrent 4 | 5 | RSpec.describe 'configuration' do 6 | 7 | context 'global executors' do 8 | 9 | it 'creates a global timer set' do 10 | expect(Concurrent.global_timer_set).not_to be_nil 11 | expect(Concurrent.global_timer_set).to respond_to(:post) 12 | end 13 | 14 | it 'creates a global fast executor' do 15 | expect(Concurrent.global_fast_executor).not_to be_nil 16 | expect(Concurrent.global_fast_executor).to respond_to(:post) 17 | end 18 | 19 | it 'creates a global io executor' do 20 | expect(Concurrent.global_io_executor).not_to be_nil 21 | expect(Concurrent.global_io_executor).to respond_to(:post) 22 | end 23 | 24 | end 25 | end 26 | end 27 | -------------------------------------------------------------------------------- /spec/concurrent/executor/executor_quits.rb: -------------------------------------------------------------------------------- 1 | 2 | lib = File.expand_path '../../../lib/concurrent-ruby/' 3 | $LOAD_PATH.push lib unless $LOAD_PATH.include? 
lib 4 | 5 | require 'concurrent' 6 | 7 | executors = [Concurrent::CachedThreadPool.new, Concurrent::SingleThreadExecutor.new, Concurrent::FixedThreadPool.new(1)] 8 | executors.each do |executor| 9 | executor.post do 10 | sleep # sleep indefinitely 11 | end 12 | end 13 | 14 | # the process main thread should quit out which should kill the daemon CachedThreadPool 15 | -------------------------------------------------------------------------------- /spec/concurrent/executor/global_thread_pool_shared.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/atomic/count_down_latch' 2 | 3 | RSpec.shared_examples :global_thread_pool do 4 | 5 | context '#post' do 6 | 7 | it 'raises an exception if no block is given' do 8 | expect { 9 | subject.post 10 | }.to raise_error(ArgumentError) 11 | end 12 | 13 | it 'returns true when the block is added to the queue' do 14 | expect(subject.post{ nil }).to be_truthy 15 | end 16 | 17 | it 'calls the block with the given arguments' do 18 | latch = Concurrent::CountDownLatch.new(1) 19 | expected = nil 20 | subject.post(1, 2, 3) do |a, b, c| 21 | expected = [a, b, c] 22 | latch.count_down 23 | end 24 | latch.wait(0.2) 25 | expect(expected).to eq [1, 2, 3] 26 | end 27 | 28 | it 'aliases #<<' do 29 | latch = Concurrent::CountDownLatch.new(1) 30 | subject << proc { latch.count_down } 31 | expect(latch.wait(0.2)).to eq true 32 | end 33 | end 34 | end 35 | -------------------------------------------------------------------------------- /spec/concurrent/executor/immediate_executor_spec.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/executor/immediate_executor' 2 | require_relative 'executor_service_shared' 3 | 4 | module Concurrent 5 | 6 | RSpec.describe ImmediateExecutor do 7 | 8 | subject { ImmediateExecutor.new } 9 | 10 | it_should_behave_like :executor_service, immediate_type: true 11 | end 12 | end 13 | -------------------------------------------------------------------------------- /spec/concurrent/executor/indirect_immediate_executor_spec.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/executor/indirect_immediate_executor' 2 | require_relative 'executor_service_shared' 3 | 4 | module Concurrent 5 | 6 | RSpec.describe IndirectImmediateExecutor do 7 | 8 | subject { IndirectImmediateExecutor.new } 9 | 10 | it_should_behave_like :executor_service, immediate_type: true 11 | 12 | it "runs its tasks synchronously" do 13 | start = Time.now 14 | subject.post { sleep 0.1 } 15 | 16 | expect(Time.now - start).to be >= 0.1 17 | end 18 | 19 | it "runs the task on a separate thread" do 20 | used_thread = nil 21 | subject.post { used_thread = Thread.current } 22 | 23 | expect(used_thread).not_to be(Thread.current) 24 | end 25 | end 26 | end 27 | -------------------------------------------------------------------------------- /spec/concurrent/executor/java_single_thread_executor_spec.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/utility/engine' 2 | 3 | if Concurrent.on_jruby? 
4 | require_relative 'executor_service_shared' 5 | 6 | module Concurrent 7 | 8 | RSpec.describe JavaSingleThreadExecutor, :type=>:jruby do 9 | 10 | after(:each) do 11 | subject.shutdown 12 | expect(subject.wait_for_termination(pool_termination_timeout)).to eq true 13 | end 14 | 15 | subject { JavaSingleThreadExecutor.new } 16 | 17 | it_should_behave_like :executor_service 18 | end 19 | end 20 | end 21 | -------------------------------------------------------------------------------- /spec/concurrent/executor/ruby_single_thread_executor_spec.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/executor/ruby_single_thread_executor' 2 | require_relative 'executor_service_shared' 3 | 4 | module Concurrent 5 | 6 | RSpec.describe RubySingleThreadExecutor, :type=>:mri do 7 | 8 | after(:each) do 9 | subject.shutdown 10 | expect(subject.wait_for_termination(pool_termination_timeout)).to eq true 11 | end 12 | 13 | subject { RubySingleThreadExecutor.new } 14 | 15 | it_should_behave_like :executor_service 16 | end 17 | end 18 | -------------------------------------------------------------------------------- /spec/concurrent/executor/serialized_execution_spec.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/executor/serialized_execution_delegator' 2 | require 'concurrent/executor/immediate_executor' 3 | require_relative 'executor_service_shared' 4 | 5 | module Concurrent 6 | 7 | RSpec.describe SerializedExecutionDelegator do 8 | 9 | subject { SerializedExecutionDelegator.new(ImmediateExecutor.new) } 10 | 11 | it_should_behave_like :executor_service, immediate_type: true 12 | end 13 | end 14 | -------------------------------------------------------------------------------- /spec/concurrent/executor/thread_pool_class_cast_spec.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/executor/single_thread_executor' 2 | require 'concurrent/executor/thread_pool_executor' 3 | 4 | module Concurrent 5 | RSpec.describe SingleThreadExecutor do 6 | if Concurrent.on_jruby? 7 | it 'inherits from JavaSingleThreadExecutor' do 8 | expect(SingleThreadExecutor.ancestors).to include(JavaSingleThreadExecutor) 9 | end 10 | else 11 | it 'inherits from RubySingleThreadExecutor' do 12 | expect(SingleThreadExecutor.ancestors).to include(RubySingleThreadExecutor) 13 | end 14 | end 15 | end 16 | 17 | RSpec.describe ThreadPoolExecutor do 18 | if Concurrent.on_jruby? 
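# Editor's aside — a sketch of the practical upshot of this aliasing (using the
# executor's documented constructor options; `do_work` is a placeholder): user code
# builds pools through the engine-neutral constant and transparently gets the
# Java- or Ruby-backed implementation.
#
#   pool = Concurrent::ThreadPoolExecutor.new(min_threads: 1, max_threads: 4)
#   pool.post { do_work }   # same call on CRuby and JRuby
#   pool.shutdown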
19 | it 'inherits from JavaThreadPoolExecutor' do 20 | expect(ThreadPoolExecutor.ancestors).to include(JavaThreadPoolExecutor) 21 | end 22 | else 23 | it 'inherits from RubyThreadPoolExecutor' do 24 | expect(ThreadPoolExecutor.ancestors).to include(RubyThreadPoolExecutor) 25 | end 26 | end 27 | end 28 | end 29 | -------------------------------------------------------------------------------- /spec/concurrent/executor/wrapping_executor_spec.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/executor/wrapping_executor' 2 | require 'concurrent/configuration' 3 | 4 | module Concurrent 5 | RSpec.describe WrappingExecutor do 6 | 7 | let(:wrapping_executor) { WrappingExecutor.new(executor, &wrapper) } 8 | let(:executor) { Concurrent.global_fast_executor } 9 | let(:wrapper) { nil } 10 | let(:args) { { foo: 'bar', baz: 42 } } 11 | let(:task) { -> (*args) { return nil } } 12 | 13 | subject { wrapping_executor } 14 | 15 | it { is_expected.to be_kind_of(WrappingExecutor) } 16 | it { is_expected.to respond_to(:post) } 17 | it { is_expected.to respond_to(:can_overflow?) } 18 | it { is_expected.to respond_to(:serialized?) } 19 | 20 | describe '#post' do 21 | context 'with passthrough wrapper' do 22 | let(:wrapper) { -> (*args, &task) { return *args, task } } 23 | 24 | it { 25 | expect(executor).to receive(:post).with(args) { |&block| expect(block).to be(task) } 26 | wrapping_executor.post(args, &task) 27 | } 28 | end 29 | 30 | context 'with wrapper modifying args' do 31 | let(:wrapper) { -> (*args, &task) { return *args, { xyz: 'abc' }, task } } 32 | 33 | it { 34 | expect(executor).to receive(:post).with(args, { xyz: 'abc' }) { |&block| expect(block).to be(task) } 35 | wrapping_executor.post(args, &task) 36 | } 37 | end 38 | 39 | context 'with wrapper modifying task' do 40 | let(:wrapper) { -> (*args, &task) { return *args, another_task } } 41 | let(:another_task) { -> (*args) { return true } } 42 | 43 | it { 44 | expect(executor).to receive(:post).with(args) { |&block| expect(block).to be(another_task) } 45 | wrapping_executor.post(args, &task) 46 | } 47 | end 48 | 49 | end 50 | end 51 | end 52 | -------------------------------------------------------------------------------- /spec/concurrent/immutable_struct_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative 'struct_shared' 2 | require 'concurrent/immutable_struct' 3 | 4 | module Concurrent 5 | RSpec.describe ImmutableStruct do 6 | it_should_behave_like :struct 7 | it_should_behave_like :mergeable_struct 8 | end 9 | end 10 | -------------------------------------------------------------------------------- /spec/concurrent/lazy_register_spec.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/lazy_register' 2 | 3 | module Concurrent 4 | RSpec.describe LazyRegister do 5 | 6 | pending 7 | 8 | end 9 | end 10 | -------------------------------------------------------------------------------- /spec/concurrent/monotonic_time_spec.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/utility/monotonic_time' 2 | 3 | module Concurrent 4 | 5 | RSpec.describe :monotonic_time do 6 | context 'behavior' do 7 | 8 | it 'returns seconds as float' do 9 | expect(Concurrent.monotonic_time).to be_a(Float) 10 | end 11 | 12 | [:float_second, :float_millisecond, :float_microsecond].each do |unit| 13 | it "returns a Float when unit = #{unit.inspect}" do 
14 | expect(Concurrent.monotonic_time(unit)).to be_a(Float) 15 | end 16 | end 17 | 18 | [:second, :millisecond, :microsecond, :nanosecond].each do |unit| 19 | it "returns an Integer when unit = #{unit.inspect}" do 20 | expect(Concurrent.monotonic_time(unit)).to be_an(Integer) 21 | end 22 | end 23 | 24 | it 'raises ArgumentError on unknown units' do 25 | expect { 26 | Concurrent.monotonic_time(:foo) 27 | }.to raise_error(ArgumentError) 28 | end 29 | 30 | end 31 | end 32 | end 33 | -------------------------------------------------------------------------------- /spec/concurrent/no_concurrent_files_loaded_before_spec.rb: -------------------------------------------------------------------------------- 1 | files_loaded_before = $LOADED_FEATURES.grep(/\/concurrent\//).grep_v(/\/version\.rb$/) 2 | 3 | RSpec.describe 'The test harness', if: ENV['ISOLATED'] do 4 | it 'does not load concurrent-ruby files to ensure there are no missing requires' do 5 | expect(files_loaded_before).to eq [] 6 | end 7 | end 8 | -------------------------------------------------------------------------------- /spec/concurrent/options_spec.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/options' 2 | 3 | module Concurrent 4 | 5 | RSpec.describe Options do 6 | 7 | context '.executor_from_options' do 8 | 9 | let(:executor) { ImmediateExecutor.new } 10 | let(:io_executor) { ImmediateExecutor.new } 11 | let(:fast_executor) { ImmediateExecutor.new } 12 | 13 | it 'returns the given :executor' do 14 | expect(Options.executor_from_options(executor: executor)).to eq executor 15 | end 16 | 17 | it 'returns the global io executor when :executor is :io' do 18 | executor = Options.executor_from_options(executor: :io) 19 | expect(executor).to eq Concurrent.global_io_executor 20 | end 21 | 22 | it 'returns the global fast executor when :executor is :fast' do 23 | executor = Options.executor_from_options(executor: :fast) 24 | expect(executor).to eq Concurrent.global_fast_executor 25 | end 26 | 27 | it 'returns an immediate executor when :executor is :immediate' do 28 | executor = Options.executor_from_options(executor: :immediate) 29 | expect(executor).to be_a Concurrent::ImmediateExecutor 30 | end 31 | 32 | it 'raises an exception when :executor is an unrecognized symbol' do 33 | expect { 34 | Options.executor_from_options(executor: :bogus) 35 | }.to raise_error(ArgumentError) 36 | end 37 | end 38 | end 39 | end 40 | -------------------------------------------------------------------------------- /spec/concurrent/require_all_files_separately.rb: -------------------------------------------------------------------------------- 1 | RSpec.describe 'Every file', if: ENV['ISOLATED'] do 2 | it 'can be required on its own' do 3 | root = File.expand_path('../..', __dir__) 4 | require_paths = [] 5 | dirs = ["#{root}/lib/concurrent-ruby", "#{root}/lib/concurrent-ruby-edge"] 6 | dirs.each do |dir| 7 | Dir.glob("#{dir}/**/*.rb") do |file| 8 | require_path = file[dir.size + 1...-3] 9 | private_file = %w[ruby_ java_ jruby_ truffleruby_].any? 
{ |prefix| 10 | File.basename(require_path).start_with?(prefix) 11 | } 12 | unless private_file 13 | require_paths << require_path 14 | end 15 | end 16 | end 17 | 18 | require_paths.each do |require_path| 19 | # An easy way to see the output and backtrace without RSpec formatting it 20 | # raise require_path unless system RbConfig.ruby, '-w', '-e', 'require ARGV.first', require_path 21 | 22 | # puts require_path 23 | out = IO.popen([RbConfig.ruby, '-w', '-e', 'require ARGV.first', require_path], err: [:child, :out], &:read) 24 | status = $? 25 | expect(out).to eq "" 26 | expect(status).to be_success 27 | end 28 | end 29 | end 30 | -------------------------------------------------------------------------------- /spec/concurrent/thread_arguments_shared.rb: -------------------------------------------------------------------------------- 1 | 2 | RSpec.shared_examples :thread_arguments do 3 | 4 | it 'passes an empty array when opts is not given' do 5 | future = get_ivar_from_no_args 6 | expect(future.value).to eq [] 7 | end 8 | 9 | it 'passes an empty array when opts is an empty hash' do 10 | future = get_ivar_from_args({}) 11 | expect(future.value).to eq [] 12 | end 13 | 14 | it 'passes an empty array when there is no :args key' do 15 | future = get_ivar_from_args(foo: 'bar') 16 | expect(future.value).to eq [] 17 | end 18 | 19 | it 'passes an empty array when the :args key has a nil value' do 20 | future = get_ivar_from_args(args: nil) 21 | expect(future.value).to eq [] 22 | end 23 | 24 | it 'passes a one-element array when the :args key has a non-array value' do 25 | future = get_ivar_from_args(args: 'foo') 26 | expect(future.value).to eq ['foo'] 27 | end 28 | 29 | it 'passes an array when when the :args key has an array value' do 30 | expected = [1, 2, 3, 4] 31 | future = get_ivar_from_args(args: expected) 32 | expect(future.value).to eq expected 33 | end 34 | 35 | it 'passes the given array when the :args key has a complex array value' do 36 | expected = [(1..10).to_a, (20..30).to_a, (100..110).to_a] 37 | future = get_ivar_from_args(args: expected) 38 | expect(future.value).to eq expected 39 | end 40 | 41 | it 'allows the given arguments array to be dereferenced' do 42 | expected = [1, 2, 3, 4] 43 | future = get_ivar_from_args(args: expected) 44 | expect(future.value).to eq expected 45 | end 46 | end 47 | -------------------------------------------------------------------------------- /spec/concurrent/thread_safe/no_unsafe_spec.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/utility/engine' 2 | 3 | if Concurrent.on_jruby? 
&& ENV['TEST_NO_UNSAFE'] 4 | # to be used like this: rake test TEST_NO_UNSAFE=true 5 | load 'test/package.jar' 6 | java_import 'thread_safe.SecurityManager' 7 | manager = SecurityManager.new 8 | 9 | # Prevent accessing internal classes 10 | manager.deny(java.lang.RuntimePermission.new('accessClassInPackage.sun.misc')) 11 | java.lang.System.setSecurityManager(manager) 12 | 13 | module Concurrent 14 | describe 'no_unsafe' do 15 | it 'security_manager_is_used' do 16 | begin 17 | java_import 'sun.misc.Unsafe' 18 | fail 19 | rescue SecurityError 20 | end 21 | end 22 | 23 | it 'no_unsafe_version_of_chmv8_is_used' do 24 | require 'concurrent/thread_safe/jruby_cache_backend' # make sure the jar has been loaded 25 | expect(!Java::OrgJrubyExtThread_safe::JRubyMapBackendLibrary::JRubyMapBackend::CAN_USE_UNSAFE_CHM).to be_truthy 26 | end 27 | end 28 | end 29 | end 30 | -------------------------------------------------------------------------------- /spec/concurrent/thread_safe/synchronized_delegator_spec.rb: -------------------------------------------------------------------------------- 1 | require 'concurrent/thread_safe/synchronized_delegator' 2 | 3 | module Concurrent 4 | RSpec.describe SynchronizedDelegator do 5 | it 'wraps array' do 6 | array = ::Array.new 7 | sync_array = described_class.new(array) 8 | 9 | array << 1 10 | expect(1).to eq sync_array[0] 11 | 12 | sync_array << 2 13 | expect(2).to eq array[1] 14 | end 15 | 16 | it 'synchronizes access' do 17 | t1_continue, t2_continue = false, false 18 | 19 | hash = ::Hash.new do |the_hash, key| 20 | t2_continue = true 21 | unless the_hash.find { |e| e[1] == key.to_s } # just to do something 22 | the_hash[key] = key.to_s 23 | Thread.pass until t1_continue 24 | end 25 | end 26 | sync_hash = described_class.new(hash) 27 | sync_hash[1] = 'egy' 28 | 29 | t1 = in_thread do 30 | sync_hash[2] = 'dva' 31 | sync_hash[3] # triggers t2_continue 32 | end 33 | 34 | t2 = in_thread do 35 | Thread.pass until t2_continue 36 | sync_hash[4] = '42' 37 | end 38 | 39 | sleep(0.05) # sleep some to allow threads to boot 40 | 41 | until t2.status == 'sleep' do 42 | Thread.pass 43 | end 44 | 45 | expect(3).to eq hash.keys.size 46 | 47 | t1_continue = true 48 | t1.join; t2.join 49 | 50 | expect(4).to eq sync_hash.size 51 | end 52 | 53 | it 'synchronizes access with block' do 54 | t1_continue, t2_continue = false, false 55 | 56 | array = ::Array.new 57 | sync_array = described_class.new(array) 58 | 59 | t1 = in_thread do 60 | sync_array << 1 61 | sync_array.each do 62 | t2_continue = true 63 | Thread.pass until t1_continue 64 | end 65 | end 66 | 67 | t2 = in_thread do 68 | # sleep(0.01) 69 | Thread.pass until t2_continue 70 | sync_array << 2 71 | end 72 | 73 | until t2.status == 'sleep' || t2.status == false 74 | Thread.pass 75 | end 76 | 77 | expect(1).to eq array.size 78 | 79 | t1_continue = true 80 | t1.join; t2.join 81 | 82 | expect([1, 2]).to eq array 83 | end 84 | end 85 | end 86 | -------------------------------------------------------------------------------- /spec/support/.gitignore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ruby-concurrency/concurrent-ruby/c8f0bae98f1233dde681441b85dbc29a869267bd/spec/support/.gitignore -------------------------------------------------------------------------------- /spec/support/example_group_extensions.rb: -------------------------------------------------------------------------------- 1 | require 'rbconfig' 2 | 3 | module Concurrent 4 | module TestHelpers 5 
| extend self 6 | 7 | def pool_termination_timeout 8 | 5 9 | end 10 | 11 | def delta(v1, v2) 12 | if block_given? 13 | v1 = yield(v1) 14 | v2 = yield(v2) 15 | end 16 | return (v1 - v2).abs 17 | end 18 | 19 | def monotonic_interval 20 | raise ArgumentError.new('no block given') unless block_given? 21 | start_time = Concurrent.monotonic_time 22 | yield 23 | Concurrent.monotonic_time - start_time 24 | end 25 | 26 | def in_fiber(&block) 27 | Fiber.new(&block) 28 | end 29 | 30 | def in_thread(*arguments, &block) 31 | @created_threads ||= Queue.new 32 | new_thread = Thread.new(*arguments) do |*args, &b| 33 | Thread.abort_on_exception = true 34 | block.call(*args, &b) 35 | end 36 | @created_threads << new_thread 37 | new_thread 38 | end 39 | 40 | def is_sleeping(thread) 41 | expect(in_thread { Thread.pass until thread.status == 'sleep' }.join(1)).not_to eq nil 42 | end 43 | 44 | def repeat_until_success(timeout = 5, &test) 45 | start_time = Concurrent.monotonic_time 46 | last_exception = nil 47 | while Concurrent.monotonic_time - start_time < timeout 48 | begin 49 | test.call 50 | return true 51 | rescue Exception => e 52 | last_exception = e 53 | Thread.pass 54 | end 55 | end 56 | raise last_exception 57 | end 58 | 59 | def join_with(threads, timeout = 5) 60 | threads = Array(threads) 61 | threads.each do |t| 62 | joined_thread = t.join(timeout * threads.size) 63 | expect(joined_thread).not_to eq nil 64 | end 65 | end 66 | end 67 | end 68 | -------------------------------------------------------------------------------- /spec/support/threadsafe_test.rb: -------------------------------------------------------------------------------- 1 | module Concurrent 2 | module ThreadSafe 3 | module Test 4 | 5 | THREADS = (RUBY_ENGINE == 'ruby' ? 100 : 10) 6 | 7 | class Barrier 8 | def initialize(count = 1) 9 | @count = count 10 | @mutex = Mutex.new 11 | @cond = ConditionVariable.new 12 | end 13 | 14 | def release 15 | @mutex.synchronize do 16 | @count -= 1 if @count > 0 17 | @cond.broadcast if @count.zero? 18 | end 19 | end 20 | 21 | def await 22 | @mutex.synchronize do 23 | if @count.zero? # fall through 24 | elsif @count > 0 25 | @count -= 1 26 | @count.zero? ? @cond.broadcast : @cond.wait(@mutex) 27 | end 28 | end 29 | end 30 | end 31 | 32 | class HashCollisionKey 33 | attr_reader :hash, :key 34 | def initialize(key, hash = key.hash % 3) 35 | @key = key 36 | @hash = hash 37 | end 38 | 39 | def eql?(other) 40 | other.kind_of?(self.class) && @key.eql?(other.key) 41 | end 42 | 43 | def even? 44 | @key.even? 
45 | end 46 | 47 | def <=>(other) 48 | @key <=> other.key 49 | end 50 | end 51 | 52 | # having 4 separate HCK classes helps for a more thorough CHMV8 testing 53 | class HashCollisionKey2 < HashCollisionKey; end 54 | class HashCollisionKeyNoCompare < HashCollisionKey 55 | def <=>(other) 56 | 0 57 | end 58 | end 59 | class HashCollisionKey4 < HashCollisionKeyNoCompare; end 60 | 61 | HASH_COLLISION_CLASSES = [HashCollisionKey, HashCollisionKey2, HashCollisionKeyNoCompare, HashCollisionKey4] 62 | 63 | def self.HashCollisionKey(key, hash = key.hash % 3) 64 | HASH_COLLISION_CLASSES[rand(4)].new(key, hash) 65 | end 66 | 67 | class HashCollisionKeyNonComparable < HashCollisionKey 68 | undef <=> 69 | end 70 | end 71 | end 72 | end 73 | -------------------------------------------------------------------------------- /support/generate_docs.rb: -------------------------------------------------------------------------------- 1 | # Using Ruby 2.6 to run this as the docs of 1.1.5 need it 2 | 3 | root = File.dirname(__dir__) 4 | versions_file = "#{root}/docs-source/signpost.md" 5 | # Note: 1.0.5 and 1.1.4 are too old, and use different rake task names 6 | versions = File.read(versions_file).scan(/\[(\d+\.\d+\.\d+) with/).map(&:first) 7 | versions.reverse! 8 | 9 | def sh(*args) 10 | command = "$ #{args.join(' ')}" 11 | puts command 12 | unless system(*args, exception: true) 13 | raise "Failed: #{command}" 14 | end 15 | end 16 | 17 | sh "rm", "-rf", "site" 18 | sh "mkdir", "site" 19 | 20 | versions.each do |version| 21 | puts 22 | puts version 23 | sh "git", "checkout", "v#{version}" 24 | has_docs = Dir.exist?('docs') 25 | 26 | sh "rm", "-f", "Gemfile.lock" 27 | sh "bundle", "install" 28 | sh "bundle", "exec", "rake", "yard:#{version}" 29 | 30 | sh "cp", "-R", "docs/#{version}", "site" 31 | sh "rm", "-rf", "docs/#{version}" 32 | sh "git", "restore", "docs" if has_docs 33 | sh "git", "restore", "docs-source" 34 | end 35 | 36 | sh "git", "checkout", "master" 37 | 38 | sh "rm", "-f", "Gemfile.lock" 39 | sh "bundle", "install" 40 | sh "bundle", "exec", "rake", "yard" 41 | 42 | versions.each do |version| 43 | sh "cp", "-R", "site/#{version}", "docs/#{version}" 44 | end 45 | 46 | sh "rm", "-rf", "site" 47 | -------------------------------------------------------------------------------- /support/yard_full_types.rb: -------------------------------------------------------------------------------- 1 | module YARD 2 | 3 | VERSION[0..2] == '0.9' or raise 'incompatible YARD' 4 | 5 | module Templates::Helpers 6 | 7 | # make sure the signatures are complete not simplified with 8 | # '...' and '?' instead of nil 9 | module HtmlHelper 10 | def signature_types(meth, link = true) 11 | meth = convert_method_to_overload(meth) 12 | if meth.respond_to?(:object) && !meth.has_tag?(:return) 13 | meth = meth.object 14 | end 15 | 16 | type = options.default_return || "" 17 | if meth.tag(:return) && meth.tag(:return).types 18 | types = meth.tags(:return).map {|t| t.types ? t.types : [] }.flatten.uniq 19 | first = link ? h(types.first) : format_types([types.first], false) 20 | # if types.size == 2 && types.last == 'nil' 21 | # type = first + '?' 22 | if types.size == 2 && types.last =~ /^(Array)?<#{Regexp.quote types.first}>$/ 23 | type = first + '+' 24 | # elsif types.size > 2 25 | # type = [first, '...'].join(', ') 26 | elsif types == ['void'] && options.hide_void_return 27 | type = "" 28 | else 29 | type = link ? h(types.join(", ")) : format_types(types, false) 30 | end 31 | elsif !type.empty? 32 | type = link ? 
h(type) : format_types([type], false) 33 | end 34 | type = "#{type} " unless type.empty? 35 | type 36 | end 37 | 38 | # enables :strikethrough extension 39 | def html_markup_markdown(text) 40 | # TODO: other libraries might be more complex 41 | provider = markup_class(:markdown) 42 | if provider.to_s == 'RDiscount' 43 | provider.new(text, :autolink).to_html 44 | elsif provider.to_s == 'RedcarpetCompat' 45 | provider.new(text, :no_intraemphasis, :gh_blockcode, 46 | :fenced_code, :autolink, :tables, 47 | :lax_spacing, :strikethrough).to_html 48 | else 49 | provider.new(text).to_html 50 | end 51 | end 52 | end 53 | end 54 | end 55 | -------------------------------------------------------------------------------- /yard-template/default/layout/html/footer.erb: -------------------------------------------------------------------------------- 1 | 4 | 5 | 15 | -------------------------------------------------------------------------------- /yard-template/default/layout/html/objects.erb: -------------------------------------------------------------------------------- 1 |

Namespace Listing A-Z

2 | 3 | <% if Registry.root.meths(:included => false).size > 0 %> 4 |
  • <%= linkify(Registry.root) %>
5 | <% end %> 6 | 7 | <% i = 0 %> 8 | 9 | 10 | 33 | 34 |
11 | <% @objects_by_letter.sort_by {|l,o| l.to_s }.each do |letter, objects| %> 12 | <% if (i += 1) % 8 == 0 %> 13 | 14 | <% i = 0 %> 15 | <% end %> 16 |
    17 |
  • <%= letter %>
  • 18 |
      19 | <%# better sorting on docs/master/_index.html %> 20 | <%# objects.each do |obj| %> 21 | <% objects.sort_by { |o| [o.name, o.namespace.path] }.each do |obj| %> 22 |
    • 23 | <%= linkify obj, obj.name %> 24 | <% if !obj.namespace.root? %> 25 | (<%= obj.namespace.path %>) 26 | <% end %> 27 |
    • 28 | <% end %> 29 |
    30 |
31 | <% end %> 32 |
35 | -------------------------------------------------------------------------------- /yard-template/default/module/setup.rb: -------------------------------------------------------------------------------- 1 | def sort_listing(list) 2 | list.sort_by do |o| 3 | [o.scope.to_s, 4 | object == o.namespace ? 0 : 1, # sort owned methods first 5 | # o.namespace.to_s.downcase, # sort by included module 6 | o.name.to_s.downcase] 7 | end 8 | end 9 | 10 | # TODO (pitr-ch 01-Jan-2019): enable inherited methods including, and do review of the documentation 11 | 12 | # def method_listing(include_specials = true) 13 | # return @smeths ||= method_listing.reject {|o| special_method?(o) } unless include_specials 14 | # return @meths if defined?(@meths) && @meths 15 | # @meths = object.meths(:inherited => true, :included => !options.embed_mixins.empty?) 16 | # unless options.embed_mixins.empty? 17 | # @meths = @meths.reject {|m| options.embed_mixins_match?(m.namespace) == false } 18 | # end 19 | # @meths = sort_listing(prune_method_listing(@meths)) 20 | # @meths 21 | # end 22 | --------------------------------------------------------------------------------