├── .devcontainer ├── Dockerfile ├── README.md ├── devcontainer.json ├── docker-compose.yml └── setup.sh ├── .github ├── dependabot.yml └── workflows │ ├── benchmarks.yml │ ├── codeql-analysis.yml │ ├── profile.yml │ ├── rubocop.yml │ └── tests.yml ├── .gitignore ├── .rubocop.yml ├── .rubocop_todo.yml ├── .standard.yml ├── 3.0-Upgrade.md ├── CHANGELOG.md ├── Gemfile ├── LICENSE ├── Performance.md ├── README.md ├── Rakefile ├── bin ├── benchmark ├── console ├── profile └── setup ├── code_of_conduct.md ├── dalli.gemspec ├── lib ├── dalli.rb ├── dalli │ ├── cas │ │ └── client.rb │ ├── client.rb │ ├── compressor.rb │ ├── key_manager.rb │ ├── options.rb │ ├── pid_cache.rb │ ├── pipelined_getter.rb │ ├── protocol.rb │ ├── protocol │ │ ├── base.rb │ │ ├── binary.rb │ │ ├── binary │ │ │ ├── request_formatter.rb │ │ │ ├── response_header.rb │ │ │ ├── response_processor.rb │ │ │ └── sasl_authentication.rb │ │ ├── connection_manager.rb │ │ ├── meta.rb │ │ ├── meta │ │ │ ├── key_regularizer.rb │ │ │ ├── request_formatter.rb │ │ │ └── response_processor.rb │ │ ├── response_buffer.rb │ │ ├── server_config_parser.rb │ │ ├── ttl_sanitizer.rb │ │ ├── value_compressor.rb │ │ ├── value_marshaller.rb │ │ └── value_serializer.rb │ ├── ring.rb │ ├── server.rb │ ├── servers_arg_normalizer.rb │ ├── socket.rb │ └── version.rb └── rack │ └── session │ └── dalli.rb ├── scripts ├── install_memcached.sh └── memcached_1.5.22.patch └── test ├── benchmark_test.rb ├── helper.rb ├── helpers └── memcached.rb ├── integration ├── test_authentication.rb ├── test_cas.rb ├── test_compressor.rb ├── test_concurrency.rb ├── test_connection_pool.rb ├── test_encoding.rb ├── test_failover.rb ├── test_fork_safety.rb ├── test_marshal.rb ├── test_memcached_admin.rb ├── test_namespace_and_key.rb ├── test_network.rb ├── test_operations.rb ├── test_pipelined_get.rb ├── test_quiet.rb ├── test_sasl.rb ├── test_serializer.rb └── test_ttl.rb ├── protocol ├── meta │ └── test_request_formatter.rb ├── test_binary.rb ├── test_connection_manager.rb ├── test_server_config_parser.rb ├── test_ttl_sanitizer.rb ├── test_value_compressor.rb ├── test_value_marshaller.rb └── test_value_serializer.rb ├── sasl ├── memcached.conf └── sasldb ├── test_client_options.rb ├── test_compressor.rb ├── test_digest_class.rb ├── test_fork_safety.rb ├── test_key_manager.rb ├── test_rack_session.rb ├── test_ring.rb ├── test_servers_arg_normalizer.rb └── utils ├── certificate_generator.rb ├── memcached_manager.rb └── memcached_mock.rb /.devcontainer/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ruby:3.4-bullseye 2 | 3 | # Install dependencies 4 | RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ 5 | && apt-get -y install --no-install-recommends \ 6 | build-essential \ 7 | git \ 8 | libsasl2-dev \ 9 | libsasl2-modules \ 10 | sasl2-bin \ 11 | libevent-dev \ 12 | libmemcached-tools \ 13 | curl \ 14 | procps \ 15 | wget \ 16 | bash \ 17 | && apt-get clean -y \ 18 | && rm -rf /var/lib/apt/lists/* 19 | 20 | # Setup non-root user with sudo access 21 | ARG USERNAME=vscode 22 | ARG USER_UID=1000 23 | ARG USER_GID=$USER_UID 24 | 25 | RUN groupadd --gid $USER_GID $USERNAME \ 26 | && useradd --uid $USER_UID --gid $USER_GID -m $USERNAME \ 27 | && apt-get update \ 28 | && apt-get install -y sudo \ 29 | && echo $USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME \ 30 | && chmod 0440 /etc/sudoers.d/$USERNAME 31 | 32 | RUN sudo chown -R $USERNAME:$USERNAME /usr/local/bundle 33 | 34 | # Install utilities 35 | RUN 
gem install bundler 36 | 37 | WORKDIR /workspace 38 | 39 | # Switch to non-root user 40 | USER $USERNAME 41 | -------------------------------------------------------------------------------- /.devcontainer/README.md: -------------------------------------------------------------------------------- 1 | # Dalli Development Container 2 | 3 | This directory contains configuration for a development container that provides a consistent environment for working on Dalli. 4 | 5 | ## Features 6 | 7 | - Ruby 3.2 environment with all necessary dependencies 8 | - Memcached 1.6.34 installed with SASL and TLS support, matching the GitHub Actions CI environment 9 | - Configuration for testing, including SASL authentication setup 10 | - VS Code extensions for Ruby development 11 | 12 | ## Setup Process 13 | 14 | When the container is built and started, the following setup occurs: 15 | 16 | 1. The container is built with necessary dependencies but without memcached 17 | 2. The `setup.sh` script runs after the container is created which: 18 | - Installs memcached 1.6.34 using the same script used in GitHub Actions 19 | - Configures SASL authentication for testing 20 | - Sets up environment variables needed for tests 21 | - Installs gem dependencies 22 | 23 | ## Running Tests 24 | 25 | Once the container is running, you can run tests with: 26 | 27 | ```bash 28 | bundle exec rake test 29 | ``` 30 | 31 | To run specific test files: 32 | 33 | ```bash 34 | bundle exec ruby -Ilib:test test/path/to/test_file.rb 35 | ``` 36 | 37 | ## Troubleshooting 38 | 39 | If you encounter issues with tests: 40 | 41 | 1. Verify memcached is running: `ps aux | grep memcached` 42 | 2. Check memcached version: `memcached -h | head -1` 43 | 3. Verify SASL is configured: `cat /usr/lib/sasl2/memcached.conf` 44 | 4. Try restarting memcached: `sudo service memcached restart` 45 | 5. Check logs for any errors: `sudo journalctl -u memcached` 46 | 47 | ## Port Forwarding 48 | 49 | The following memcached ports are forwarded for testing: 50 | - 11211 - Default memcached port 51 | - 11212-11215 - Additional ports used by tests 52 | 53 | ## Environment Variables 54 | 55 | - `RUN_SASL_TESTS=1` - Enables SASL authentication tests -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Ruby Dalli Development", 3 | "dockerComposeFile": "docker-compose.yml", 4 | "service": "app", 5 | "workspaceFolder": "/workspace", 6 | "customizations": { 7 | "vscode": { 8 | "extensions": [ 9 | "rebornix.ruby", 10 | "castwide.solargraph", 11 | "kaiwood.endwise", 12 | "misogi.ruby-rubocop" 13 | ], 14 | "settings": { 15 | "ruby.useBundler": true, 16 | "ruby.format": "rubocop", 17 | "editor.formatOnSave": true, 18 | "ruby.useLanguageServer": true, 19 | "ruby.lint": { 20 | "rubocop": { 21 | "useBundler": true 22 | } 23 | }, 24 | "terminal.integrated.defaultProfile.linux": "bash" 25 | } 26 | } 27 | }, 28 | "forwardPorts": [ 29 | 11211, 30 | 11212, 31 | 11213, 32 | 11214, 33 | 11215 34 | ], 35 | "postCreateCommand": "chmod +x .devcontainer/setup.sh && .devcontainer/setup.sh", 36 | "waitFor": "postCreateCommand", 37 | "remoteUser": "vscode" 38 | } -------------------------------------------------------------------------------- /.devcontainer/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | 3 | services: 4 | app: 5 | build: 6 | context: . 
7 | dockerfile: Dockerfile 8 | volumes: 9 | - ..:/workspace:cached 10 | command: sleep infinity 11 | -------------------------------------------------------------------------------- /.devcontainer/setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | echo "Setting up Dalli development environment..." 5 | 6 | # Install memcached using the script from scripts directory 7 | echo "Installing memcached..." 8 | cd /workspace 9 | export MEMCACHED_VERSION=1.6.34 10 | chmod +x scripts/install_memcached.sh 11 | scripts/install_memcached.sh 12 | 13 | # Clean up memcached installation files 14 | echo "Cleaning up memcached installation files..." 15 | rm -f memcached-${MEMCACHED_VERSION}.tar.gz 16 | rm -rf memcached-${MEMCACHED_VERSION} 17 | 18 | # Create symlink for memcached-tool if needed 19 | if [ ! -f /usr/local/bin/memcached-tool ]; then 20 | echo "Creating symlink for memcached-tool..." 21 | sudo ln -sf /usr/share/memcached/scripts/memcached-tool /usr/local/bin/memcached-tool 22 | fi 23 | 24 | echo "Setting up environment variables..." 25 | # Ensure test environment is properly configured 26 | cat >> ~/.bashrc << EOF 27 | 28 | # Dalli test environment 29 | export RUN_SASL_TESTS=1 30 | EOF 31 | 32 | # Fix permissions 33 | sudo chown -R vscode:vscode /usr/local/bundle 34 | echo "Installing dependencies..." 35 | cd /workspace 36 | bundle install 37 | 38 | echo "Environment setup complete!" 39 | echo "You can now run tests with: bundle exec rake test" 40 | echo "To run a specific test file: bundle exec ruby -Ilib:test test/integration/test_fork.rb" 41 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "weekly" 7 | -------------------------------------------------------------------------------- /.github/workflows/benchmarks.yml: -------------------------------------------------------------------------------- 1 | name: Benchmarks 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | 9 | steps: 10 | - uses: actions/checkout@v4 11 | - name: Install Memcached 1.6.23 12 | working-directory: scripts 13 | env: 14 | MEMCACHED_VERSION: 1.6.23 15 | run: | 16 | chmod +x ./install_memcached.sh 17 | ./install_memcached.sh 18 | memcached -d 19 | memcached -d -p 11222 20 | - name: Set up Ruby 21 | uses: ruby/setup-ruby@v1 22 | with: 23 | ruby-version: 3.4 24 | bundler-cache: true # 'bundle install' and cache 25 | - name: Run Benchmarks 26 | run: RUBY_YJIT_ENABLE=1 BENCH_TARGET=all bundle exec bin/benchmark 27 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 
11 | # 12 | name: "CodeQL" 13 | 14 | on: 15 | push: 16 | branches: [ main ] 17 | pull_request: 18 | # The branches below must be a subset of the branches above 19 | branches: [ main ] 20 | schedule: 21 | - cron: '22 14 * * 5' 22 | 23 | jobs: 24 | analyze: 25 | name: Analyze 26 | runs-on: ubuntu-latest 27 | permissions: 28 | actions: read 29 | contents: read 30 | security-events: write 31 | 32 | strategy: 33 | fail-fast: false 34 | matrix: 35 | language: [ 'ruby' ] 36 | 37 | steps: 38 | - name: Checkout repository 39 | uses: actions/checkout@v4 40 | 41 | # Initializes the CodeQL tools for scanning. 42 | - name: Initialize CodeQL 43 | uses: github/codeql-action/init@v3 44 | with: 45 | languages: ${{ matrix.language }} 46 | # If you wish to specify custom queries, you can do so here or in a config file. 47 | # By default, queries listed here will override any specified in a config file. 48 | # Prefix the list here with "+" to use these queries and those in the config file. 49 | # queries: ./path/to/local/query, your-org/your-repo/queries@main 50 | 51 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 52 | # If this step fails, then you should remove it and run the build manually (see below) 53 | - name: Autobuild 54 | uses: github/codeql-action/autobuild@v3 55 | 56 | # ℹ️ Command-line programs to run using the OS shell. 57 | # 📚 https://git.io/JvXDl 58 | 59 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines 60 | # and modify them (or add more) to build your code if your project 61 | # uses a compiled language 62 | 63 | #- run: | 64 | # make bootstrap 65 | # make release 66 | 67 | - name: Perform CodeQL Analysis 68 | uses: github/codeql-action/analyze@v3 69 | 70 | -------------------------------------------------------------------------------- /.github/workflows/profile.yml: -------------------------------------------------------------------------------- 1 | name: Profiles 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | 9 | steps: 10 | - uses: actions/checkout@v4 11 | - name: Install Memcached 1.6.23 12 | working-directory: scripts 13 | env: 14 | MEMCACHED_VERSION: 1.6.23 15 | run: | 16 | chmod +x ./install_memcached.sh 17 | ./install_memcached.sh 18 | memcached -d 19 | - name: Set up Ruby 20 | uses: ruby/setup-ruby@v1 21 | with: 22 | ruby-version: 3.4 23 | bundler-cache: true # 'bundle install' and cache 24 | - name: Run Profiles 25 | run: RUBY_YJIT_ENABLE=1 BENCH_TARGET=all bundle exec bin/profile 26 | # NOTE: to pull profile results, visit https://github.com/petergoldstein/dalli/actions/workflows/profile.yml 27 | # click to view the run you are interested in (ex https://github.com/petergoldstein/dalli/actions/runs/13296952241) 28 | # in the artifacts section, download the profile results 29 | - name: Upload profile results 30 | uses: actions/upload-artifact@v4 31 | with: 32 | name: profile-results 33 | path: | 34 | client_get_profile.json 35 | socket_get_profile.json 36 | client_set_profile.json 37 | socket_set_profile.json 38 | client_get_multi_profile.json 39 | socket_get_multi_profile.json 40 | client_set_multi_profile.json 41 | socket_set_multi_profile.json 42 | meta_client_get_multi_profile.json 43 | meta_client_get_profile.json 44 | meta_client_set_multi_profile.json 45 | meta_client_set_profile.json 46 | -------------------------------------------------------------------------------- /.github/workflows/rubocop.yml: 
-------------------------------------------------------------------------------- 1 | name: RuboCop 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | 9 | steps: 10 | - uses: actions/checkout@v4 11 | - name: Set up Ruby 12 | uses: ruby/setup-ruby@v1 13 | with: 14 | ruby-version: ruby 15 | bundler-cache: true # 'bundle install' and cache 16 | - name: Run RuboCop 17 | run: bundle exec rubocop --parallel --color 18 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | test: 7 | runs-on: ubuntu-latest 8 | 9 | strategy: 10 | fail-fast: false 11 | matrix: 12 | ruby-version: 13 | - head 14 | - '3.4' 15 | - '3.3' 16 | - '3.2' 17 | - '3.1' 18 | - '3.0' 19 | - '2.7' 20 | - '2.6' 21 | - jruby-10 22 | memcached-version: ['1.5.22', '1.6.34'] 23 | 24 | name: "Ruby ${{ matrix.ruby-version }} / Memcached ${{ matrix.memcached-version }}" 25 | steps: 26 | - uses: actions/checkout@v4 27 | - name: Install Memcached ${{ matrix.memcached-version }} 28 | working-directory: scripts 29 | env: 30 | MEMCACHED_VERSION: ${{ matrix.memcached-version }} 31 | run: | 32 | chmod +x ./install_memcached.sh 33 | ./install_memcached.sh 34 | - name: Set up Ruby ${{ matrix.ruby-version }} 35 | uses: ruby/setup-ruby@v1 36 | with: 37 | ruby-version: ${{ matrix.ruby-version }} 38 | bundler-cache: true # 'bundle install' and cache 39 | - name: Run tests 40 | run: bundle exec rake 41 | env: 42 | RUN_SASL_TESTS: 1 43 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.gem 2 | *.rbc 3 | /.config 4 | /coverage/ 5 | /InstalledFiles 6 | /pkg/ 7 | /spec/reports/ 8 | /test/tmp/ 9 | /test/version_tmp/ 10 | /tmp/ 11 | 12 | ## Specific to RubyMotion: 13 | .dat* 14 | .repl_history 15 | build/ 16 | 17 | ## Documentation cache and generated files: 18 | /.yardoc/ 19 | /_yardoc/ 20 | /doc/ 21 | /html/ 22 | /rdoc/ 23 | profile.html 24 | 25 | ## Environment normalisation: 26 | /.bundle/ 27 | /lib/bundler/man/ 28 | 29 | # for a library or gem, you might want to ignore these files since the code is 30 | # intended to run in multiple environments; otherwise, check them in: 31 | Gemfile.lock 32 | gemfiles/*.lock 33 | .ruby-version 34 | .ruby-gemset 35 | 36 | # unless supporting rvm < 1.11.0 or doing something fancy, ignore this: 37 | .rvmrc 38 | -------------------------------------------------------------------------------- /.rubocop.yml: -------------------------------------------------------------------------------- 1 | inherit_from: .rubocop_todo.yml 2 | 3 | plugins: 4 | - rubocop-minitest 5 | - rubocop-performance 6 | - rubocop-rake 7 | 8 | AllCops: 9 | NewCops: enable 10 | TargetRubyVersion: 2.6 11 | 12 | Metrics/BlockLength: 13 | Max: 50 14 | Exclude: 15 | - 'test/**/*' 16 | 17 | Metrics/MethodLength: 18 | Max: 15 19 | 20 | Style/Documentation: 21 | Exclude: 22 | - 'test/**/*' 23 | -------------------------------------------------------------------------------- /.rubocop_todo.yml: -------------------------------------------------------------------------------- 1 | # This configuration was generated by 2 | # `rubocop --auto-gen-config` 3 | # on 2025-04-01 20:25:11 UTC using RuboCop version 1.75.1. 
4 | # The point is for the user to remove these configuration records 5 | # one by one as the offenses are removed from the code base. 6 | # Note that changes in the inspected code, or installation of new 7 | # versions of RuboCop, may require this file to be generated again. 8 | 9 | # Offense count: 1 10 | # Configuration parameters: AllowedMethods, AllowedPatterns, CountRepeatedAttributes. 11 | Metrics/AbcSize: 12 | Max: 19 13 | 14 | # Offense count: 9 15 | # Configuration parameters: CountComments, CountAsOne. 16 | Metrics/ClassLength: 17 | Max: 191 18 | 19 | # Offense count: 4 20 | # Configuration parameters: CountComments, CountAsOne, AllowedMethods, AllowedPatterns. 21 | Metrics/MethodLength: 22 | Exclude: 23 | - 'lib/dalli/pipelined_getter.rb' 24 | - 'lib/dalli/protocol/base.rb' 25 | 26 | # Offense count: 1 27 | # Configuration parameters: CountComments, CountAsOne. 28 | Metrics/ModuleLength: 29 | Max: 108 30 | -------------------------------------------------------------------------------- /.standard.yml: -------------------------------------------------------------------------------- 1 | fix: false # default: false 2 | parallel: true # default: false 3 | ruby_version: 2.5.1 # default: RUBY_VERSION 4 | default_ignores: false # default: true 5 | 6 | ignore: # default: [] 7 | - 'test/**/*': 8 | - Style/GlobalVars 9 | - Style/Semicolon 10 | -------------------------------------------------------------------------------- /3.0-Upgrade.md: -------------------------------------------------------------------------------- 1 | # Dalli 3.0 2 | 3 | This major version update contains several backwards incompatible changes. 4 | 5 | * **:dalli_store** has been removed. Users should migrate to the 6 | official Rails **:mem_cache_store**, documented in the [caching 7 | guide](https://guides.rubyonrails.org/caching_with_rails.html#activesupport-cache-memcachestore). 8 | * Attempting to store a larger value than allowed by memcached used to 9 | print a warning and truncate the value. This now raises an error to 10 | prevent silent data corruption. 11 | * Compression now defaults to `true` for large values (greater than 4KB). 12 | This is intended to minimize errors due to the previous note. 13 | * Errors marshalling values now raise rather than just printing an error. 14 | * The Rack session adapter has been refactored to remove support for thread-unsafe 15 | configurations. You will need to include the `connection_pool` gem in 16 | your Gemfile to ensure session operations are thread-safe. 17 | * Support for the `kgio` gem has been removed, it is not relevant in Ruby 2.3+. 18 | * Removed inline native code, use Ruby 2.3+ support for bsearch instead. 19 | * The CAS operations previously in 'dalli/cas/client' have been 20 | integrated into 'dalli/client'. 21 | 22 | ## Future Directions 23 | 24 | The memcached project has deprecated the binary protocol used by Dalli 25 | in favor of a new `meta/text` protocol that is somewhat human readable. 26 | 27 | Dalli 4.0 will move in this direction and require memcached 1.6+. 
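As a rough illustration of that direction (a minimal sketch, not an official recommendation — the `protocol: :meta` option below is the same one exercised by `bin/profile` in this repository, and the address is a placeholder for your own memcached instance):

```ruby
require 'dalli'

# Opt a single client into the meta/text protocol (requires memcached 1.6+).
client = Dalli::Client.new('127.0.0.1:11211', protocol: :meta)

client.set('greeting', 'hello')
client.get('greeting') # => "hello"
```

Clients constructed without the option are unaffected, so the two protocols can be compared side by side against the same server.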
28 | -------------------------------------------------------------------------------- /Gemfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | source 'https://rubygems.org' 4 | 5 | gemspec 6 | 7 | group :development, :test do 8 | gem 'cgi' 9 | gem 'connection_pool' 10 | gem 'debug' unless RUBY_PLATFORM == 'java' 11 | gem 'minitest', '~> 5' 12 | gem 'rack', '~> 2.0', '>= 2.2.0' 13 | gem 'rake', '~> 13.0' 14 | gem 'rubocop' 15 | gem 'rubocop-minitest' 16 | gem 'rubocop-performance' 17 | gem 'rubocop-rake' 18 | gem 'simplecov' 19 | end 20 | 21 | group :test do 22 | gem 'ruby-prof', platform: :mri 23 | end 24 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) Peter M. Goldstein, Mike Perham 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining 4 | a copy of this software and associated documentation files (the 5 | "Software"), to deal in the Software without restriction, including 6 | without limitation the rights to use, copy, modify, merge, publish, 7 | distribute, sublicense, and/or sell copies of the Software, and to 8 | permit persons to whom the Software is furnished to do so, subject to 9 | the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be 12 | included in all copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 15 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 16 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 17 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 18 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 19 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 20 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 21 | -------------------------------------------------------------------------------- /Performance.md: -------------------------------------------------------------------------------- 1 | Performance 2 | ==================== 3 | 4 | Caching is all about performance, so I carefully track Dalli performance to ensure no regressions. 5 | You can optionally use kgio to give Dalli a 10-20% performance boost: `gem install kgio`. 6 | 7 | Note I've added some benchmarks over time to Dalli that the other libraries don't necessarily have. 
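The blocks below are standard `Benchmark` timings (user/system/total/real columns). To get comparable numbers on your own machine — a minimal sketch only, assuming memcached is listening on 127.0.0.1:11211; the repository's fuller harness lives in `bin/benchmark` and `test/benchmark_test.rb` — something like this reproduces the shape of the set/get rows:

```ruby
require 'benchmark'
require 'dalli'

client = Dalli::Client.new('127.0.0.1:11211')
payload = 'x' * 1_000

Benchmark.bm(20) do |x|
  # Roughly mirrors the set:plain and get:plain rows reported below.
  x.report('set:plain:dalli') { 10_000.times { |i| client.set("bench_key_#{i}", payload) } }
  x.report('get:plain:dalli') { 10_000.times { |i| client.get("bench_key_#{i}") } }
end
```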
8 | 9 | memcache-client 10 | --------------- 11 | 12 | Testing 1.8.5 with ruby 1.9.3p0 (2011-10-30 revision 33570) [x86_64-darwin11.2.0] 13 | 14 | user system total real 15 | set:plain:memcache-client 1.860000 0.310000 2.170000 ( 2.188030) 16 | set:ruby:memcache-client 1.830000 0.290000 2.120000 ( 2.130212) 17 | get:plain:memcache-client 1.830000 0.340000 2.170000 ( 2.176156) 18 | get:ruby:memcache-client 1.900000 0.330000 2.230000 ( 2.235045) 19 | multiget:ruby:memcache-client 0.860000 0.120000 0.980000 ( 0.987348) 20 | missing:ruby:memcache-client 1.630000 0.320000 1.950000 ( 1.954867) 21 | mixed:ruby:memcache-client 3.690000 0.670000 4.360000 ( 4.364469) 22 | 23 | 24 | dalli 25 | ----- 26 | 27 | Testing with Rails 3.2.1 28 | Using kgio socket IO 29 | Testing 2.0.0 with ruby 1.9.3p125 (2012-02-16 revision 34643) [x86_64-darwin11.3.0] 30 | 31 | user system total real 32 | mixed:rails:dalli 1.580000 0.570000 2.150000 ( 3.008839) 33 | set:plain:dalli 0.730000 0.300000 1.030000 ( 1.567098) 34 | setq:plain:dalli 0.520000 0.120000 0.640000 ( 0.634402) 35 | set:ruby:dalli 0.800000 0.300000 1.100000 ( 1.640348) 36 | get:plain:dalli 0.840000 0.330000 1.170000 ( 1.668425) 37 | get:ruby:dalli 0.850000 0.330000 1.180000 ( 1.665716) 38 | multiget:ruby:dalli 0.700000 0.260000 0.960000 ( 0.965423) 39 | missing:ruby:dalli 0.720000 0.320000 1.040000 ( 1.511720) 40 | mixed:ruby:dalli 1.660000 0.640000 2.300000 ( 3.320743) 41 | mixedq:ruby:dalli 1.630000 0.510000 2.140000 ( 2.629734) 42 | incr:ruby:dalli 0.270000 0.100000 0.370000 ( 0.547618) 43 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Dalli [![Tests](https://github.com/petergoldstein/dalli/actions/workflows/tests.yml/badge.svg)](https://github.com/petergoldstein/dalli/actions/workflows/tests.yml) 2 | ===== 3 | 4 | Dalli is a high performance pure Ruby client for accessing memcached servers. 5 | 6 | Dalli supports: 7 | 8 | * Simple and complex memcached configurations 9 | * Failover between memcached instances 10 | * Fine-grained control of data serialization and compression 11 | * Thread-safe operation (either through use of a connection pool, or by using the Dalli client in threadsafe mode) 12 | * SSL/TLS connections to memcached 13 | * SASL authentication 14 | 15 | The name is a variant of Salvador Dali for his famous painting [The Persistence of Memory](http://en.wikipedia.org/wiki/The_Persistence_of_Memory). 16 | 17 | ![Persistence of Memory](https://upload.wikimedia.org/wikipedia/en/d/dd/The_Persistence_of_Memory.jpg) 18 | 19 | 20 | ## Documentation and Information 21 | 22 | * [User Documentation](https://github.com/petergoldstein/dalli/wiki) - The documentation is maintained in the repository's wiki. 23 | * [Announcements](https://github.com/petergoldstein/dalli/discussions/categories/announcements) - Announcements of interest to the Dalli community will be posted here. 24 | * [Bug Reports](https://github.com/petergoldstein/dalli/issues) - If you discover a problem with Dalli, please submit a bug report in the tracker. 25 | * [Forum](https://github.com/petergoldstein/dalli/discussions/categories/q-a) - If you have questions about Dalli, please post them here. 26 | * [Client API](https://www.rubydoc.info/gems/dalli) - Ruby documentation for the `Dalli::Client` API 27 | 28 | ## Development 29 | 30 | After checking out the repo, run `bin/setup` to install dependencies. 
You can run `bin/console` for an interactive prompt that will allow you to experiment. 31 | 32 | To install this gem onto your local machine, run `bundle exec rake install`. 33 | 34 | ## Contributing 35 | 36 | If you have a fix you wish to provide, please fork the code, fix in your local project and then send a pull request on github. Please ensure that you include a test which verifies your fix and update the [changelog](CHANGELOG.md) with a one sentence description of your fix so you get credit as a contributor. 37 | 38 | ## Appreciation 39 | 40 | Dalli would not exist in its current form without the contributions of many people. But special thanks go to several individuals and organizations: 41 | 42 | * Mike Perham - for originally authoring the Dalli project and serving as maintainer and primary contributor for many years 43 | * Eric Wong - for help using his [kgio](http://bogomips.org/kgio/) library. 44 | * Brian Mitchell - for his remix-stash project which was helpful when implementing and testing the binary protocol support. 45 | * [CouchBase](http://couchbase.com) - for their sponsorship of the original development 46 | 47 | 48 | ## Authors 49 | 50 | * [Peter M. Goldstein](https://github.com/petergoldstein) - current maintainer 51 | * [Mike Perham](https://github.com/mperham) and contributors 52 | 53 | 54 | ## Copyright 55 | 56 | Copyright (c) Mike Perham, Peter M. Goldstein. See LICENSE for details. 57 | -------------------------------------------------------------------------------- /Rakefile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'bundler/gem_tasks' 4 | require 'rake/testtask' 5 | 6 | Rake::TestTask.new(:test) do |test| 7 | test.pattern = 'test/**/test_*.rb' 8 | test.warning = true 9 | test.verbose = true 10 | end 11 | task default: :test 12 | 13 | Rake::TestTask.new(:bench) do |test| 14 | test.pattern = 'test/benchmark_test.rb' 15 | end 16 | -------------------------------------------------------------------------------- /bin/console: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | # frozen_string_literal: true 3 | 4 | require 'bundler/setup' 5 | require 'dalli' 6 | 7 | # You can add fixtures and/or initialization code here to make experimenting 8 | # with your gem easier. You can also use a different console, if you like. 9 | 10 | # (If you use this, don't forget to add pry to your Gemfile!) 11 | # require "pry" 12 | # Pry.start 13 | 14 | require 'irb' 15 | IRB.start(__FILE__) 16 | -------------------------------------------------------------------------------- /bin/profile: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | # frozen_string_literal: true 3 | 4 | # This helps profile specific call paths in Dalli 5 | # finding and fixing performance issues in these profiles should result in improvements in the dalli benchmarks 6 | # 7 | # run with: 8 | # RUBY_YJIT_ENABLE=1 bundle exec bin/profile 9 | require 'bundler/inline' 10 | 11 | gemfile do 12 | source 'https://rubygems.org' 13 | gem 'benchmark-ips' 14 | gem 'vernier' 15 | gem 'logger' 16 | end 17 | 18 | require 'json' 19 | require 'benchmark/ips' 20 | require 'vernier' 21 | require_relative '../lib/dalli' 22 | 23 | ## 24 | # NoopSerializer is a serializer that avoids the overhead of Marshal or JSON. 
25 | ## 26 | class NoopSerializer 27 | def self.dump(value) 28 | value 29 | end 30 | 31 | def self.load(value) 32 | value 33 | end 34 | end 35 | 36 | dalli_url = ENV['BENCH_CACHE_URL'] || '127.0.0.1:11211' 37 | bench_target = ENV['BENCH_TARGET'] || 'all' 38 | bench_time = (ENV['BENCH_TIME'] || 8).to_i 39 | bench_payload_size = (ENV['BENCH_PAYLOAD_SIZE'] || 700_000).to_i 40 | TERMINATOR = "\r\n" 41 | puts "yjit: #{RubyVM::YJIT.enabled?}" 42 | 43 | client = Dalli::Client.new(dalli_url, serializer: NoopSerializer, compress: false) 44 | meta_client = Dalli::Client.new(dalli_url, protocol: :meta, serializer: NoopSerializer, compress: false, raw: true) 45 | 46 | # The raw socket implementation is used to benchmark the performance of dalli & the overhead of the various abstractions 47 | # in the library. 48 | sock = TCPSocket.new('127.0.0.1', '11211', connect_timeout: 1) 49 | sock.setsockopt(Socket::IPPROTO_TCP, Socket::TCP_NODELAY, true) 50 | sock.setsockopt(Socket::SOL_SOCKET, Socket::SO_KEEPALIVE, true) 51 | # Benchmarks didn't see any performance gains from increasing the SO_RCVBUF buffer size 52 | # sock.setsockopt(Socket::SOL_SOCKET, ::Socket::SO_RCVBUF, 1024 * 1024 * 8) 53 | # Benchmarks did see an improvement in performance when increasing the SO_SNDBUF buffer size 54 | # sock.setsockopt(Socket::SOL_SOCKET, Socket::SO_SNDBUF, 1024 * 1024 * 8) 55 | 56 | payload = 'B' * bench_payload_size 57 | dalli_key = 'dalli_key' 58 | # ensure the clients are all connected and working 59 | client.set(dalli_key, payload) 60 | meta_client.set(dalli_key, payload) 61 | sock.write("set sock_key 0 3600 #{payload.bytesize}\r\n") 62 | sock.write(payload) 63 | sock.write(TERMINATOR) 64 | sock.flush 65 | sock.readline # clear the buffer 66 | 67 | # ensure we have basic data for the benchmarks and get calls 68 | payload_smaller = 'B' * (bench_payload_size / 10) 69 | pairs = {} 70 | 100.times do |i| 71 | pairs["multi_#{i}"] = payload_smaller 72 | end 73 | client.quiet do 74 | pairs.each do |key, value| 75 | client.set(key, value, 3600, raw: true) 76 | end 77 | end 78 | 79 | # rubocop:disable Metrics/MethodLength 80 | # rubocop:disable Metrics/PerceivedComplexity 81 | # rubocop:disable Metrics/AbcSize 82 | # rubocop:disable Metrics/CyclomaticComplexity 83 | def sock_get_multi(sock, pairs) 84 | count = pairs.length 85 | pairs.each_key do |key| 86 | count -= 1 87 | tail = count.zero? ? '' : 'q' 88 | sock.write("mg #{key} v f k #{tail}\r\n") 89 | end 90 | sock.flush 91 | # read all the memcached responses back and build a hash of key value pairs 92 | results = {} 93 | last_result = false 94 | while (line = sock.readline.chomp!(TERMINATOR)) != '' 95 | last_result = true if line.start_with?('EN ') 96 | next unless line.start_with?('VA ') || last_result 97 | 98 | _, value_length, _flags, key = line.split 99 | results[key[1..]] = sock.read(value_length.to_i) 100 | sock.read(TERMINATOR.length) 101 | break if results.size == pairs.size 102 | break if last_result 103 | end 104 | results 105 | end 106 | # rubocop:enable Metrics/PerceivedComplexity 107 | # rubocop:enable Metrics/AbcSize 108 | # rubocop:enable Metrics/CyclomaticComplexity 109 | 110 | def sock_set_multi(sock, pairs) 111 | count = pairs.length 112 | tail = '' 113 | ttl = 3600 114 | 115 | pairs.each do |key, value| 116 | count -= 1 117 | tail = count.zero? ? 
'' : 'q' 118 | sock.write(String.new("ms #{key} #{value.bytesize} c F0 T#{ttl} MS #{tail}\r\n", 119 | capacity: key.size + value.bytesize + 40)) 120 | sock.write(value) 121 | sock.write(TERMINATOR) 122 | end 123 | sock.flush 124 | sock.gets(TERMINATOR) # clear the buffer 125 | end 126 | # rubocop:enable Metrics/MethodLength 127 | 128 | if %w[all get].include?(bench_target) 129 | Vernier.profile(out: 'client_get_profile.json') do 130 | start_time = Time.now 131 | while Time.now - start_time < bench_time 132 | result = client.get(dalli_key) 133 | raise 'mismatch' unless result == payload 134 | end 135 | end 136 | 137 | Vernier.profile(out: 'meta_client_get_profile.json') do 138 | start_time = Time.now 139 | while Time.now - start_time < bench_time 140 | result = meta_client.get(dalli_key) 141 | raise 'mismatch' unless result == payload 142 | end 143 | end 144 | 145 | Vernier.profile(out: 'socket_get_profile.json') do 146 | start_time = Time.now 147 | while Time.now - start_time < bench_time 148 | sock.write("mg sock_key v\r\n") 149 | sock.readline 150 | result = sock.read(payload.bytesize) 151 | sock.read(TERMINATOR.bytesize) 152 | raise 'mismatch' unless result == payload 153 | end 154 | end 155 | end 156 | 157 | if %w[all set].include?(bench_target) 158 | Vernier.profile(out: 'client_set_profile.json') do 159 | start_time = Time.now 160 | client.set(dalli_key, payload, 3600, raw: true) while Time.now - start_time < bench_time 161 | end 162 | 163 | Vernier.profile(out: 'meta_client_set_profile.json') do 164 | start_time = Time.now 165 | meta_client.set(dalli_key, payload, 3600, raw: true) while Time.now - start_time < bench_time 166 | end 167 | 168 | Vernier.profile(out: 'socket_set_profile.json') do 169 | start_time = Time.now 170 | while Time.now - start_time < bench_time 171 | sock.write("ms sock_key #{payload.bytesize} T3600 MS\r\n") 172 | sock.write(payload) 173 | sock.write("\r\n") 174 | sock.flush 175 | sock.readline # clear the buffer 176 | end 177 | end 178 | end 179 | 180 | if %w[all get_multi].include?(bench_target) 181 | Vernier.profile(out: 'client_get_multi_profile.json') do 182 | start_time = Time.now 183 | while Time.now - start_time < bench_time 184 | result = client.get_multi(pairs.keys) 185 | raise 'mismatch' unless result == pairs 186 | end 187 | end 188 | 189 | Vernier.profile(out: 'meta_client_get_multi_profile.json') do 190 | start_time = Time.now 191 | while Time.now - start_time < bench_time 192 | result = meta_client.get_multi(pairs.keys) 193 | raise 'mismatch' unless result == pairs 194 | end 195 | end 196 | 197 | Vernier.profile(out: 'socket_get_multi_profile.json') do 198 | start_time = Time.now 199 | while Time.now - start_time < bench_time 200 | result = sock_get_multi(sock, pairs) 201 | raise 'mismatch' unless result == pairs 202 | end 203 | end 204 | end 205 | 206 | if %w[all set_multi].include?(bench_target) 207 | Vernier.profile(out: 'client_set_multi_profile.json') do 208 | start_time = Time.now 209 | # until we port over set_multi, compare the simple loop 210 | # client.set_multi(pairs, 3600, raw: true) while Time.now - start_time < bench_time 211 | while Time.now - start_time < bench_time 212 | pairs.each do |key, value| 213 | client.set(key, value, 3600, raw: true) 214 | end 215 | end 216 | end 217 | 218 | Vernier.profile(out: 'meta_client_set_multi_profile.json') do 219 | start_time = Time.now 220 | while Time.now - start_time < bench_time 221 | pairs.each do |key, value| 222 | meta_client.set(key, value, 3600, raw: true) 223 | end 224 | end 225 | end 
226 | 227 | Vernier.profile(out: 'socket_set_multi_profile.json') do 228 | start_time = Time.now 229 | sock_set_multi(sock, pairs) while Time.now - start_time < bench_time 230 | end 231 | end 232 | -------------------------------------------------------------------------------- /bin/setup: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -euo pipefail 3 | IFS=$'\n\t' 4 | set -vx 5 | 6 | bundle install 7 | 8 | # Do any other automated setup that you need to do here 9 | -------------------------------------------------------------------------------- /code_of_conduct.md: -------------------------------------------------------------------------------- 1 | # Contributor Code of Conduct 2 | 3 | As contributors and maintainers of this project, and in the interest of 4 | fostering an open and welcoming community, we pledge to respect all people who 5 | contribute through reporting issues, posting feature requests, updating 6 | documentation, submitting pull requests or patches, and other activities. 7 | 8 | We are committed to making participation in this project a harassment-free 9 | experience for everyone, regardless of level of experience, gender, gender 10 | identity and expression, sexual orientation, disability, personal appearance, 11 | body size, race, ethnicity, age, religion, or nationality. 12 | 13 | Examples of unacceptable behavior by participants include: 14 | 15 | * The use of sexualized language or imagery 16 | * Personal attacks 17 | * Trolling or insulting/derogatory comments 18 | * Public or private harassment 19 | * Publishing other's private information, such as physical or electronic 20 | addresses, without explicit permission 21 | * Other unethical or unprofessional conduct 22 | 23 | Project maintainers have the right and responsibility to remove, edit, or 24 | reject comments, commits, code, wiki edits, issues, and other contributions 25 | that are not aligned to this Code of Conduct, or to ban temporarily or 26 | permanently any contributor for other behaviors that they deem inappropriate, 27 | threatening, offensive, or harmful. 28 | 29 | By adopting this Code of Conduct, project maintainers commit themselves to 30 | fairly and consistently applying these principles to every aspect of managing 31 | this project. Project maintainers who do not follow or enforce the Code of 32 | Conduct may be permanently removed from the project team. 33 | 34 | This Code of Conduct applies both within project spaces and in public spaces 35 | when an individual is representing the project or its community. 36 | 37 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 38 | reported by contacting the project maintainer at peter.m.goldstein AT gmail.com. All 39 | complaints will be reviewed and investigated and will result in a response that 40 | is deemed necessary and appropriate to the circumstances. Maintainers are 41 | obligated to maintain confidentiality with regard to the reporter of an 42 | incident. 
43 | 44 | 45 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 46 | version 1.3.0, available at 47 | [http://contributor-covenant.org/version/1/3/0/][version] 48 | 49 | [homepage]: http://contributor-covenant.org 50 | [version]: http://contributor-covenant.org/version/1/3/0/ 51 | -------------------------------------------------------------------------------- /dalli.gemspec: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require './lib/dalli/version' 4 | 5 | Gem::Specification.new do |s| 6 | s.name = 'dalli' 7 | s.version = Dalli::VERSION 8 | s.license = 'MIT' 9 | 10 | s.authors = ['Peter M. Goldstein', 'Mike Perham'] 11 | s.description = s.summary = 'High performance memcached client for Ruby' 12 | s.email = ['peter.m.goldstein@gmail.com', 'mperham@gmail.com'] 13 | s.files = Dir.glob('lib/**/*') + [ 14 | 'LICENSE', 15 | 'README.md', 16 | 'CHANGELOG.md', 17 | 'Gemfile' 18 | ] 19 | s.homepage = 'https://github.com/petergoldstein/dalli' 20 | s.required_ruby_version = '>= 2.6' 21 | 22 | s.metadata = { 23 | 'bug_tracker_uri' => 'https://github.com/petergoldstein/dalli/issues', 24 | 'changelog_uri' => 'https://github.com/petergoldstein/dalli/blob/main/CHANGELOG.md', 25 | 'rubygems_mfa_required' => 'true' 26 | } 27 | 28 | s.add_dependency 'logger' 29 | end 30 | -------------------------------------------------------------------------------- /lib/dalli.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | ## 4 | # Namespace for all Dalli code. 5 | ## 6 | module Dalli 7 | autoload :Server, 'dalli/server' 8 | 9 | # generic error 10 | class DalliError < RuntimeError; end 11 | 12 | # socket/server communication error 13 | class NetworkError < DalliError; end 14 | 15 | # no server available/alive error 16 | class RingError < DalliError; end 17 | 18 | # application error in marshalling serialization 19 | class MarshalError < DalliError; end 20 | 21 | # application error in marshalling deserialization or decompression 22 | class UnmarshalError < DalliError; end 23 | 24 | # payload too big for memcached 25 | class ValueOverMaxSize < DalliError; end 26 | 27 | # operation is not permitted in a multi block 28 | class NotPermittedMultiOpError < DalliError; end 29 | 30 | # raised when Memcached response with a SERVER_ERROR 31 | class ServerError < DalliError; end 32 | 33 | # Implements the NullObject pattern to store an application-defined value for 'Key not found' responses. 
34 | class NilObject; end # rubocop:disable Lint/EmptyClass 35 | NOT_FOUND = NilObject.new 36 | 37 | QUIET = :dalli_multi 38 | 39 | def self.logger 40 | @logger ||= rails_logger || default_logger 41 | end 42 | 43 | def self.rails_logger 44 | (defined?(Rails) && Rails.respond_to?(:logger) && Rails.logger) || 45 | (defined?(RAILS_DEFAULT_LOGGER) && RAILS_DEFAULT_LOGGER.respond_to?(:debug) && RAILS_DEFAULT_LOGGER) 46 | end 47 | 48 | def self.default_logger 49 | require 'logger' 50 | l = Logger.new($stdout) 51 | l.level = Logger::INFO 52 | l 53 | end 54 | 55 | def self.logger=(logger) 56 | @logger = logger 57 | end 58 | end 59 | 60 | require_relative 'dalli/version' 61 | 62 | require_relative 'dalli/compressor' 63 | require_relative 'dalli/client' 64 | require_relative 'dalli/key_manager' 65 | require_relative 'dalli/pipelined_getter' 66 | require_relative 'dalli/ring' 67 | require_relative 'dalli/protocol' 68 | require_relative 'dalli/protocol/base' 69 | require_relative 'dalli/protocol/binary' 70 | require_relative 'dalli/protocol/connection_manager' 71 | require_relative 'dalli/protocol/meta' 72 | require_relative 'dalli/protocol/response_buffer' 73 | require_relative 'dalli/protocol/server_config_parser' 74 | require_relative 'dalli/protocol/ttl_sanitizer' 75 | require_relative 'dalli/protocol/value_compressor' 76 | require_relative 'dalli/protocol/value_marshaller' 77 | require_relative 'dalli/protocol/value_serializer' 78 | require_relative 'dalli/servers_arg_normalizer' 79 | require_relative 'dalli/socket' 80 | require_relative 'dalli/options' 81 | -------------------------------------------------------------------------------- /lib/dalli/cas/client.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | puts "You can remove `require 'dalli/cas/client'` as this code has been rolled into the standard 'dalli/client'." 4 | -------------------------------------------------------------------------------- /lib/dalli/compressor.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'zlib' 4 | require 'stringio' 5 | 6 | module Dalli 7 | ## 8 | # Default compressor used by Dalli, that uses 9 | # Zlib DEFLATE to compress data. 10 | ## 11 | class Compressor 12 | def self.compress(data) 13 | Zlib::Deflate.deflate(data) 14 | end 15 | 16 | def self.decompress(data) 17 | Zlib::Inflate.inflate(data) 18 | end 19 | end 20 | 21 | ## 22 | # Alternate compressor for Dalli, that uses 23 | # Gzip. Gzip adds a checksum to each compressed 24 | # entry. 25 | ## 26 | class GzipCompressor 27 | def self.compress(data) 28 | io = StringIO.new(+'', 'w') 29 | gz = Zlib::GzipWriter.new(io) 30 | gz.write(data) 31 | gz.close 32 | io.string 33 | end 34 | 35 | def self.decompress(data) 36 | io = StringIO.new(data, 'rb') 37 | Zlib::GzipReader.new(io).read 38 | end 39 | end 40 | end 41 | -------------------------------------------------------------------------------- /lib/dalli/key_manager.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'digest/md5' 4 | 5 | module Dalli 6 | ## 7 | # This class manages and validates keys sent to Memcached, ensuring 8 | # that they meet Memcached key length requirements, and supporting 9 | # the implementation of optional namespaces on a per-Dalli client 10 | # basis. 
11 | ## 12 | class KeyManager 13 | MAX_KEY_LENGTH = 250 14 | 15 | NAMESPACE_SEPARATOR = ':' 16 | 17 | # This is a hard coded md5 for historical reasons 18 | TRUNCATED_KEY_SEPARATOR = ':md5:' 19 | 20 | # This is 249 for historical reasons 21 | TRUNCATED_KEY_TARGET_SIZE = 249 22 | 23 | DEFAULTS = { 24 | digest_class: ::Digest::MD5 25 | }.freeze 26 | 27 | OPTIONS = %i[digest_class namespace].freeze 28 | 29 | attr_reader :namespace 30 | 31 | def initialize(client_options) 32 | @key_options = 33 | DEFAULTS.merge(client_options.slice(*OPTIONS)) 34 | validate_digest_class_option(@key_options) 35 | 36 | @namespace = namespace_from_options 37 | end 38 | 39 | ## 40 | # Validates the key, and transforms as needed. 41 | # 42 | # If the key is nil or empty, raises ArgumentError. Whitespace 43 | # characters are allowed for historical reasons, but likely shouldn't 44 | # be used. 45 | # If the key (with namespace) is shorter than the memcached maximum 46 | # allowed key length, just returns the argument key 47 | # Otherwise computes a "truncated" key that uses a truncated prefix 48 | # combined with a 32-byte hex digest of the whole key. 49 | ## 50 | def validate_key(key) 51 | raise ArgumentError, 'key cannot be blank' unless key&.length&.positive? 52 | 53 | key = key_with_namespace(key) 54 | key.length > MAX_KEY_LENGTH ? truncated_key(key) : key 55 | end 56 | 57 | ## 58 | # Returns the key with the namespace prefixed, if a namespace is 59 | # defined. Otherwise just returns the key 60 | ## 61 | def key_with_namespace(key) 62 | return key if namespace.nil? 63 | 64 | "#{evaluate_namespace}#{NAMESPACE_SEPARATOR}#{key}" 65 | end 66 | 67 | def key_without_namespace(key) 68 | return key if namespace.nil? 69 | 70 | key.sub(namespace_regexp, '') 71 | end 72 | 73 | def digest_class 74 | @digest_class ||= @key_options[:digest_class] 75 | end 76 | 77 | def namespace_regexp 78 | return /\A#{Regexp.escape(evaluate_namespace)}:/ if namespace.is_a?(Proc) 79 | 80 | @namespace_regexp ||= /\A#{Regexp.escape(namespace)}:/.freeze unless namespace.nil? 81 | end 82 | 83 | def validate_digest_class_option(opts) 84 | return if opts[:digest_class].respond_to?(:hexdigest) 85 | 86 | raise ArgumentError, 'The digest_class object must respond to the hexdigest method' 87 | end 88 | 89 | def namespace_from_options 90 | raw_namespace = @key_options[:namespace] 91 | return nil unless raw_namespace 92 | return raw_namespace.to_s unless raw_namespace.is_a?(Proc) 93 | 94 | raw_namespace 95 | end 96 | 97 | def evaluate_namespace 98 | return namespace.call.to_s if namespace.is_a?(Proc) 99 | 100 | namespace 101 | end 102 | 103 | ## 104 | # Produces a truncated key, if the raw key is longer than the maximum allowed 105 | # length. The truncated key is produced by generating a hex digest 106 | # of the key, and appending that to a truncated section of the key. 107 | ## 108 | def truncated_key(key) 109 | digest = digest_class.hexdigest(key) 110 | "#{key[0, prefix_length(digest)]}#{TRUNCATED_KEY_SEPARATOR}#{digest}" 111 | end 112 | 113 | def prefix_length(digest) 114 | return TRUNCATED_KEY_TARGET_SIZE - (TRUNCATED_KEY_SEPARATOR.length + digest.length) if namespace.nil? 
115 | 116 | # For historical reasons, truncated keys with namespaces had a length of 250 rather 117 | # than 249 118 | TRUNCATED_KEY_TARGET_SIZE + 1 - (TRUNCATED_KEY_SEPARATOR.length + digest.length) 119 | end 120 | end 121 | end 122 | -------------------------------------------------------------------------------- /lib/dalli/options.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'monitor' 4 | 5 | module Dalli 6 | # Make Dalli threadsafe by using a lock around all 7 | # public server methods. 8 | # 9 | # Dalli::Protocol::Binary.extend(Dalli::Threadsafe) 10 | # 11 | module Threadsafe 12 | def self.extended(obj) 13 | obj.init_threadsafe 14 | end 15 | 16 | def request(opcode, *args) 17 | @lock.synchronize do 18 | super 19 | end 20 | end 21 | 22 | def alive? 23 | @lock.synchronize do 24 | super 25 | end 26 | end 27 | 28 | def close 29 | @lock.synchronize do 30 | super 31 | end 32 | end 33 | 34 | def pipeline_response_setup 35 | @lock.synchronize do 36 | super 37 | end 38 | end 39 | 40 | def pipeline_next_responses 41 | @lock.synchronize do 42 | super 43 | end 44 | end 45 | 46 | def pipeline_abort 47 | @lock.synchronize do 48 | super 49 | end 50 | end 51 | 52 | def lock! 53 | @lock.mon_enter 54 | end 55 | 56 | def unlock! 57 | @lock.mon_exit 58 | end 59 | 60 | def init_threadsafe 61 | @lock = Monitor.new 62 | end 63 | end 64 | end 65 | -------------------------------------------------------------------------------- /lib/dalli/pid_cache.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Dalli 4 | ## 5 | # Dalli::PIDCache is a wrapper class for PID checking to avoid system calls when checking the PID. 6 | ## 7 | module PIDCache 8 | if !Process.respond_to?(:fork) # JRuby or TruffleRuby 9 | @pid = Process.pid 10 | singleton_class.attr_reader(:pid) 11 | elsif Process.respond_to?(:_fork) # Ruby 3.1+ 12 | class << self 13 | attr_reader :pid 14 | 15 | def update! 16 | @pid = Process.pid 17 | end 18 | end 19 | update! 20 | 21 | ## 22 | # Dalli::PIDCache::CoreExt hooks into Process to be able to reset the PID cache after fork 23 | ## 24 | module CoreExt 25 | def _fork 26 | child_pid = super 27 | PIDCache.update! if child_pid.zero? 28 | child_pid 29 | end 30 | end 31 | Process.singleton_class.prepend(CoreExt) 32 | else # Ruby 3.0 or older 33 | class << self 34 | def pid 35 | Process.pid 36 | end 37 | end 38 | end 39 | end 40 | end 41 | -------------------------------------------------------------------------------- /lib/dalli/pipelined_getter.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Dalli 4 | ## 5 | # Contains logic for the pipelined gets implemented by the client. 6 | ## 7 | class PipelinedGetter 8 | def initialize(ring, key_manager) 9 | @ring = ring 10 | @key_manager = key_manager 11 | end 12 | 13 | ## 14 | # Yields, one at a time, keys and their values+attributes. 15 | # 16 | def process(keys, &block) 17 | return {} if keys.empty? 18 | 19 | @ring.lock do 20 | servers = setup_requests(keys) 21 | start_time = Time.now 22 | servers = fetch_responses(servers, start_time, @ring.socket_timeout, &block) until servers.empty? 
23 | end 24 | rescue NetworkError => e 25 | Dalli.logger.debug { e.inspect } 26 | Dalli.logger.debug { 'retrying pipelined gets because of timeout' } 27 | retry 28 | end 29 | 30 | def setup_requests(keys) 31 | groups = groups_for_keys(keys) 32 | make_getkq_requests(groups) 33 | 34 | # TODO: How does this exit on a NetworkError 35 | finish_queries(groups.keys) 36 | end 37 | 38 | ## 39 | # Loop through the server-grouped sets of keys, writing 40 | # the corresponding getkq requests to the appropriate servers 41 | # 42 | # It's worth noting that we could potentially reduce bytes 43 | # on the wire by switching from getkq to getq, and using 44 | # the opaque value to match requests to responses. 45 | ## 46 | def make_getkq_requests(groups) 47 | groups.each do |server, keys_for_server| 48 | server.request(:pipelined_get, keys_for_server) 49 | rescue DalliError, NetworkError => e 50 | Dalli.logger.debug { e.inspect } 51 | Dalli.logger.debug { "unable to get keys for server #{server.name}" } 52 | end 53 | end 54 | 55 | ## 56 | # This loops through the servers that have keys in 57 | # our set, sending the noop to terminate the set of queries. 58 | ## 59 | def finish_queries(servers) 60 | deleted = [] 61 | 62 | servers.each do |server| 63 | next unless server.connected? 64 | 65 | begin 66 | finish_query_for_server(server) 67 | rescue Dalli::NetworkError 68 | raise 69 | rescue Dalli::DalliError 70 | deleted.append(server) 71 | end 72 | end 73 | 74 | servers.delete_if { |server| deleted.include?(server) } 75 | rescue Dalli::NetworkError 76 | abort_without_timeout(servers) 77 | raise 78 | end 79 | 80 | def finish_query_for_server(server) 81 | server.pipeline_response_setup 82 | rescue Dalli::NetworkError 83 | raise 84 | rescue Dalli::DalliError => e 85 | Dalli.logger.debug { e.inspect } 86 | Dalli.logger.debug { "Results from server: #{server.name} will be missing from the results" } 87 | raise 88 | end 89 | 90 | # Swallows Dalli::NetworkError 91 | def abort_without_timeout(servers) 92 | servers.each(&:pipeline_abort) 93 | end 94 | 95 | def fetch_responses(servers, start_time, timeout, &block) 96 | # Remove any servers which are not connected 97 | servers.delete_if { |s| !s.connected? } 98 | return [] if servers.empty? 99 | 100 | time_left = remaining_time(start_time, timeout) 101 | readable_servers = servers_with_response(servers, time_left) 102 | if readable_servers.empty? 103 | abort_with_timeout(servers) 104 | return [] 105 | end 106 | 107 | # Loop through the servers with responses, and 108 | # delete any from our list that are finished 109 | readable_servers.each do |server| 110 | servers.delete(server) if process_server(server, &block) 111 | end 112 | servers 113 | rescue NetworkError 114 | # Abort and raise if we encountered a network error. This triggers 115 | # a retry at the top level. 116 | abort_without_timeout(servers) 117 | raise 118 | end 119 | 120 | def remaining_time(start, timeout) 121 | elapsed = Time.now - start 122 | return 0 if elapsed > timeout 123 | 124 | timeout - elapsed 125 | end 126 | 127 | # Swallows Dalli::NetworkError 128 | def abort_with_timeout(servers) 129 | abort_without_timeout(servers) 130 | servers.each do |server| 131 | Dalli.logger.debug { "memcached at #{server.name} did not response within timeout" } 132 | end 133 | 134 | true # Required to simplify caller 135 | end 136 | 137 | # Processes responses from a server. Returns true if there are no 138 | # additional responses from this server. 
139 | def process_server(server) 140 | server.pipeline_next_responses.each_pair do |key, value_list| 141 | yield @key_manager.key_without_namespace(key), value_list 142 | end 143 | 144 | server.pipeline_complete? 145 | end 146 | 147 | def servers_with_response(servers, timeout) 148 | return [] if servers.empty? 149 | 150 | # TODO: - This is a bit challenging. Essentially the PipelinedGetter 151 | # is a reactor, but without the benefit of a Fiber or separate thread. 152 | # My suspicion is that we may want to try and push this down into the 153 | # individual servers, but I'm not sure. For now, we keep the 154 | # mapping between the alerted object (the socket) and the 155 | # corrresponding server here. 156 | server_map = servers.each_with_object({}) { |s, h| h[s.sock] = s } 157 | 158 | readable, = IO.select(server_map.keys, nil, nil, timeout) 159 | return [] if readable.nil? 160 | 161 | readable.map { |sock| server_map[sock] } 162 | end 163 | 164 | def groups_for_keys(*keys) 165 | keys.flatten! 166 | keys.map! { |a| @key_manager.validate_key(a.to_s) } 167 | groups = @ring.keys_grouped_by_server(keys) 168 | if (unfound_keys = groups.delete(nil)) 169 | Dalli.logger.debug do 170 | "unable to get keys for #{unfound_keys.length} keys " \ 171 | 'because no matching server was found' 172 | end 173 | end 174 | groups 175 | end 176 | end 177 | end 178 | -------------------------------------------------------------------------------- /lib/dalli/protocol.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'timeout' 4 | 5 | module Dalli 6 | module Protocol 7 | # Preserved for backwards compatibility. Should be removed in 4.0 8 | NOT_FOUND = ::Dalli::NOT_FOUND 9 | 10 | # Ruby 3.2 raises IO::TimeoutError on blocking reads/writes, but 11 | # it is not defined in earlier Ruby versions. 12 | TIMEOUT_ERRORS = 13 | if defined?(IO::TimeoutError) 14 | [Timeout::Error, IO::TimeoutError] 15 | else 16 | [Timeout::Error] 17 | end 18 | end 19 | end 20 | -------------------------------------------------------------------------------- /lib/dalli/protocol/binary.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'forwardable' 4 | require 'socket' 5 | require 'timeout' 6 | 7 | module Dalli 8 | module Protocol 9 | ## 10 | # Access point for a single Memcached server, accessed via Memcached's binary 11 | # protocol. Contains logic for managing connection state to the server (retries, etc), 12 | # formatting requests to the server, and unpacking responses. 
13 | ## 14 | class Binary < Base 15 | def response_processor 16 | @response_processor ||= ResponseProcessor.new(@connection_manager, @value_marshaller) 17 | end 18 | 19 | private 20 | 21 | # Retrieval Commands 22 | def get(key, options = nil) 23 | req = RequestFormatter.standard_request(opkey: :get, key: key) 24 | write(req) 25 | response_processor.get(cache_nils: cache_nils?(options)) 26 | end 27 | 28 | def quiet_get_request(key) 29 | RequestFormatter.standard_request(opkey: :getkq, key: key) 30 | end 31 | 32 | def gat(key, ttl, options = nil) 33 | ttl = TtlSanitizer.sanitize(ttl) 34 | req = RequestFormatter.standard_request(opkey: :gat, key: key, ttl: ttl) 35 | write(req) 36 | response_processor.get(cache_nils: cache_nils?(options)) 37 | end 38 | 39 | def touch(key, ttl) 40 | ttl = TtlSanitizer.sanitize(ttl) 41 | write(RequestFormatter.standard_request(opkey: :touch, key: key, ttl: ttl)) 42 | response_processor.generic_response 43 | end 44 | 45 | # TODO: This is confusing, as there's a cas command in memcached 46 | # and this isn't it. Maybe rename? Maybe eliminate? 47 | def cas(key) 48 | req = RequestFormatter.standard_request(opkey: :get, key: key) 49 | write(req) 50 | response_processor.data_cas_response 51 | end 52 | 53 | # Storage Commands 54 | def set(key, value, ttl, cas, options) 55 | opkey = quiet? ? :setq : :set 56 | storage_req(opkey, key, value, ttl, cas, options) 57 | end 58 | 59 | def add(key, value, ttl, options) 60 | opkey = quiet? ? :addq : :add 61 | storage_req(opkey, key, value, ttl, 0, options) 62 | end 63 | 64 | def replace(key, value, ttl, cas, options) 65 | opkey = quiet? ? :replaceq : :replace 66 | storage_req(opkey, key, value, ttl, cas, options) 67 | end 68 | 69 | # rubocop:disable Metrics/ParameterLists 70 | def storage_req(opkey, key, value, ttl, cas, options) 71 | (value, bitflags) = @value_marshaller.store(key, value, options) 72 | ttl = TtlSanitizer.sanitize(ttl) 73 | 74 | req = RequestFormatter.standard_request(opkey: opkey, key: key, 75 | value: value, bitflags: bitflags, 76 | ttl: ttl, cas: cas) 77 | write(req) 78 | response_processor.storage_response unless quiet? 79 | end 80 | # rubocop:enable Metrics/ParameterLists 81 | 82 | def append(key, value) 83 | opkey = quiet? ? :appendq : :append 84 | write_append_prepend opkey, key, value 85 | end 86 | 87 | def prepend(key, value) 88 | opkey = quiet? ? :prependq : :prepend 89 | write_append_prepend opkey, key, value 90 | end 91 | 92 | def write_append_prepend(opkey, key, value) 93 | write(RequestFormatter.standard_request(opkey: opkey, key: key, value: value)) 94 | response_processor.no_body_response unless quiet? 95 | end 96 | 97 | # Delete Commands 98 | def delete(key, cas) 99 | opkey = quiet? ? :deleteq : :delete 100 | req = RequestFormatter.standard_request(opkey: opkey, key: key, cas: cas) 101 | write(req) 102 | response_processor.delete unless quiet? 103 | end 104 | 105 | # Arithmetic Commands 106 | def decr(key, count, ttl, initial) 107 | opkey = quiet? ? :decrq : :decr 108 | decr_incr opkey, key, count, ttl, initial 109 | end 110 | 111 | def incr(key, count, ttl, initial) 112 | opkey = quiet? ? :incrq : :incr 113 | decr_incr opkey, key, count, ttl, initial 114 | end 115 | 116 | # This allows us to special case a nil initial value, and 117 | # handle it differently than a zero. 
This special value 118 | # for expiry causes memcached to return a not found 119 | # if the key doesn't already exist, rather than 120 | # setting the initial value 121 | NOT_FOUND_EXPIRY = 0xFFFFFFFF 122 | private_constant :NOT_FOUND_EXPIRY 123 | 124 | def decr_incr(opkey, key, count, ttl, initial) 125 | expiry = initial ? TtlSanitizer.sanitize(ttl) : NOT_FOUND_EXPIRY 126 | initial ||= 0 127 | write(RequestFormatter.decr_incr_request(opkey: opkey, key: key, 128 | count: count, initial: initial, expiry: expiry)) 129 | response_processor.decr_incr unless quiet? 130 | end 131 | 132 | # Other Commands 133 | def flush(ttl = 0) 134 | opkey = quiet? ? :flushq : :flush 135 | write(RequestFormatter.standard_request(opkey: opkey, ttl: ttl)) 136 | response_processor.no_body_response unless quiet? 137 | end 138 | 139 | # Noop is a keepalive operation but also used to demarcate the end of a set of pipelined commands. 140 | # We need to read all the responses at once. 141 | def noop 142 | write_noop 143 | response_processor.consume_all_responses_until_noop 144 | end 145 | 146 | def stats(info = '') 147 | req = RequestFormatter.standard_request(opkey: :stat, key: info) 148 | write(req) 149 | response_processor.stats 150 | end 151 | 152 | def reset_stats 153 | write(RequestFormatter.standard_request(opkey: :stat, key: 'reset')) 154 | response_processor.reset 155 | end 156 | 157 | def version 158 | write(RequestFormatter.standard_request(opkey: :version)) 159 | response_processor.version 160 | end 161 | 162 | def write_noop 163 | req = RequestFormatter.standard_request(opkey: :noop) 164 | write(req) 165 | end 166 | 167 | require_relative 'binary/request_formatter' 168 | require_relative 'binary/response_header' 169 | require_relative 'binary/response_processor' 170 | require_relative 'binary/sasl_authentication' 171 | include SaslAuthentication 172 | end 173 | end 174 | end 175 | -------------------------------------------------------------------------------- /lib/dalli/protocol/binary/request_formatter.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Dalli 4 | module Protocol 5 | class Binary 6 | ## 7 | # Class that encapsulates logic for formatting binary protocol requests 8 | # to memcached. 
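#
# As a worked example (derived from the opcode table and pack formats below):
# a :get request for the three-byte key 'abc' has no extras and no value, so
# standard_request packs
#
#   [0x80, 0x00, 3, 0, 0, 0, 3, 0, 0] + ['abc']
#
# using 'CCnCCnNNQa*' -- magic byte, opcode, key length, extras length,
# data type, vbucket/reserved, total body length, opaque and cas, followed
# by the key itself.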
9 | ## 10 | class RequestFormatter 11 | REQUEST = 0x80 12 | 13 | OPCODES = { 14 | get: 0x00, 15 | set: 0x01, 16 | add: 0x02, 17 | replace: 0x03, 18 | delete: 0x04, 19 | incr: 0x05, 20 | decr: 0x06, 21 | flush: 0x08, 22 | noop: 0x0A, 23 | version: 0x0B, 24 | getkq: 0x0D, 25 | append: 0x0E, 26 | prepend: 0x0F, 27 | stat: 0x10, 28 | setq: 0x11, 29 | addq: 0x12, 30 | replaceq: 0x13, 31 | deleteq: 0x14, 32 | incrq: 0x15, 33 | decrq: 0x16, 34 | flushq: 0x18, 35 | appendq: 0x19, 36 | prependq: 0x1A, 37 | touch: 0x1C, 38 | gat: 0x1D, 39 | auth_negotiation: 0x20, 40 | auth_request: 0x21, 41 | auth_continue: 0x22 42 | }.freeze 43 | 44 | REQ_HEADER_FORMAT = 'CCnCCnNNQ' 45 | 46 | KEY_ONLY = 'a*' 47 | TTL_AND_KEY = 'Na*' 48 | KEY_AND_VALUE = 'a*a*' 49 | INCR_DECR = 'NNNNNa*' 50 | TTL_ONLY = 'N' 51 | NO_BODY = '' 52 | 53 | BODY_FORMATS = { 54 | get: KEY_ONLY, 55 | getkq: KEY_ONLY, 56 | delete: KEY_ONLY, 57 | deleteq: KEY_ONLY, 58 | stat: KEY_ONLY, 59 | 60 | append: KEY_AND_VALUE, 61 | prepend: KEY_AND_VALUE, 62 | appendq: KEY_AND_VALUE, 63 | prependq: KEY_AND_VALUE, 64 | auth_request: KEY_AND_VALUE, 65 | auth_continue: KEY_AND_VALUE, 66 | 67 | set: 'NNa*a*', 68 | setq: 'NNa*a*', 69 | add: 'NNa*a*', 70 | addq: 'NNa*a*', 71 | replace: 'NNa*a*', 72 | replaceq: 'NNa*a*', 73 | 74 | incr: INCR_DECR, 75 | decr: INCR_DECR, 76 | incrq: INCR_DECR, 77 | decrq: INCR_DECR, 78 | 79 | flush: TTL_ONLY, 80 | flushq: TTL_ONLY, 81 | 82 | noop: NO_BODY, 83 | auth_negotiation: NO_BODY, 84 | version: NO_BODY, 85 | 86 | touch: TTL_AND_KEY, 87 | gat: TTL_AND_KEY 88 | }.freeze 89 | FORMAT = BODY_FORMATS.transform_values { |v| REQ_HEADER_FORMAT + v } 90 | 91 | # rubocop:disable Metrics/ParameterLists 92 | def self.standard_request(opkey:, key: nil, value: nil, opaque: 0, cas: 0, bitflags: nil, ttl: nil) 93 | extra_len = (bitflags.nil? ? 0 : 4) + (ttl.nil? ? 0 : 4) 94 | key_len = key.nil? ? 0 : key.bytesize 95 | value_len = value.nil? ? 0 : value.bytesize 96 | header = [REQUEST, OPCODES[opkey], key_len, extra_len, 0, 0, extra_len + key_len + value_len, opaque, cas] 97 | body = [bitflags, ttl, key, value].compact 98 | (header + body).pack(FORMAT[opkey]) 99 | end 100 | # rubocop:enable Metrics/ParameterLists 101 | 102 | def self.decr_incr_request(opkey:, key: nil, count: nil, initial: nil, expiry: nil) 103 | extra_len = 20 104 | (h, l) = as_8byte_uint(count) 105 | (dh, dl) = as_8byte_uint(initial) 106 | header = [REQUEST, OPCODES[opkey], key.bytesize, extra_len, 0, 0, key.bytesize + extra_len, 0, 0] 107 | body = [h, l, dh, dl, expiry, key] 108 | (header + body).pack(FORMAT[opkey]) 109 | end 110 | 111 | def self.as_8byte_uint(val) 112 | [val >> 32, val & 0xFFFFFFFF] 113 | end 114 | end 115 | end 116 | end 117 | end 118 | -------------------------------------------------------------------------------- /lib/dalli/protocol/binary/response_header.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Dalli 4 | module Protocol 5 | class Binary 6 | ## 7 | # Class that encapsulates data parsed from a memcached response header. 8 | ## 9 | class ResponseHeader 10 | SIZE = 24 11 | FMT = '@2nCCnNNQ' 12 | 13 | attr_reader :key_len, :extra_len, :data_type, :status, :body_len, :opaque, :cas 14 | 15 | def initialize(buf) 16 | raise ArgumentError, "Response buffer must be at least #{SIZE} bytes" unless buf.bytesize >= SIZE 17 | 18 | @key_len, @extra_len, @data_type, @status, @body_len, @opaque, @cas = buf.unpack(FMT) 19 | end 20 | 21 | def ok? 22 | status.zero? 
23 | end 24 | 25 | def not_found? 26 | status == 1 27 | end 28 | 29 | NOT_STORED_STATUSES = [2, 5].freeze 30 | def not_stored? 31 | NOT_STORED_STATUSES.include?(status) 32 | end 33 | end 34 | end 35 | end 36 | end 37 | -------------------------------------------------------------------------------- /lib/dalli/protocol/binary/sasl_authentication.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Dalli 4 | module Protocol 5 | class Binary 6 | ## 7 | # Code to support SASL authentication 8 | ## 9 | module SaslAuthentication 10 | def perform_auth_negotiation 11 | write(RequestFormatter.standard_request(opkey: :auth_negotiation)) 12 | 13 | status, content = response_processor.auth_response 14 | return [status, []] if content.nil? 15 | 16 | # Substitute spaces for the \x00 returned by 17 | # memcached as a separator for easier 18 | content&.tr!("\u0000", ' ') 19 | mechanisms = content&.split 20 | [status, mechanisms] 21 | end 22 | 23 | PLAIN_AUTH = 'PLAIN' 24 | 25 | def supported_mechanisms!(mechanisms) 26 | unless mechanisms.include?(PLAIN_AUTH) 27 | raise NotImplementedError, 28 | 'Dalli only supports the PLAIN authentication mechanism' 29 | end 30 | [PLAIN_AUTH] 31 | end 32 | 33 | def authenticate_with_plain 34 | write(RequestFormatter.standard_request(opkey: :auth_request, 35 | key: PLAIN_AUTH, 36 | value: "\x0#{username}\x0#{password}")) 37 | @response_processor.auth_response 38 | end 39 | 40 | def authenticate_connection 41 | Dalli.logger.info { "Dalli/SASL authenticating as #{username}" } 42 | 43 | status, mechanisms = perform_auth_negotiation 44 | return Dalli.logger.debug('Authentication not required/supported by server') if status == 0x81 45 | 46 | supported_mechanisms!(mechanisms) 47 | status, content = authenticate_with_plain 48 | 49 | return Dalli.logger.info("Dalli/SASL: #{content}") if status.zero? 50 | 51 | raise Dalli::DalliError, "Error authenticating: 0x#{status.to_s(16)}" unless status == 0x21 52 | 53 | raise NotImplementedError, 'No two-step authentication mechanisms supported' 54 | # (step, msg) = sasl.receive('challenge', content) 55 | # raise Dalli::NetworkError, "Authentication failed" if sasl.failed? || step != 'response' 56 | end 57 | end 58 | end 59 | end 60 | end 61 | -------------------------------------------------------------------------------- /lib/dalli/protocol/connection_manager.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'English' 4 | require 'socket' 5 | require 'timeout' 6 | 7 | require 'dalli/pid_cache' 8 | 9 | module Dalli 10 | module Protocol 11 | ## 12 | # Manages the socket connection to the server, including ensuring liveness 13 | # and retries. 
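#
# As a rough sketch (the hostname and option values are arbitrary, and :tcp
# is illustrative -- this class itself only special-cases :unix): per-client
# options are merged over DEFAULTS, so a caller can tighten the socket
# timeout like so:
#
#   cm = ConnectionManager.new('cache.example.com', 11211, :tcp,
#                              socket_timeout: 0.5, down_retry_delay: 10)
#   cm.name   # => "cache.example.com:11211" (just the path for unix sockets)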
14 | ## 15 | class ConnectionManager 16 | DEFAULTS = { 17 | # seconds between trying to contact a remote server 18 | down_retry_delay: 30, 19 | # connect/read/write timeout for socket operations 20 | socket_timeout: 1, 21 | # times a socket operation may fail before considering the server dead 22 | socket_max_failures: 2, 23 | # amount of time to sleep between retries when a failure occurs 24 | socket_failure_delay: 0.1, 25 | # Set keepalive 26 | keepalive: true 27 | }.freeze 28 | 29 | attr_accessor :hostname, :port, :socket_type, :options 30 | attr_reader :sock 31 | 32 | def initialize(hostname, port, socket_type, client_options) 33 | @hostname = hostname 34 | @port = port 35 | @socket_type = socket_type 36 | @options = DEFAULTS.merge(client_options) 37 | @request_in_progress = false 38 | @sock = nil 39 | @pid = nil 40 | 41 | reset_down_info 42 | end 43 | 44 | def name 45 | if socket_type == :unix 46 | hostname 47 | else 48 | "#{hostname}:#{port}" 49 | end 50 | end 51 | 52 | def establish_connection 53 | Dalli.logger.debug { "Dalli::Server#connect #{name}" } 54 | 55 | @sock = memcached_socket 56 | @pid = PIDCache.pid 57 | @request_in_progress = false 58 | rescue SystemCallError, *TIMEOUT_ERRORS, EOFError, SocketError => e 59 | # SocketError = DNS resolution failure 60 | error_on_request!(e) 61 | end 62 | 63 | def reconnect_down_server? 64 | return true unless @last_down_at 65 | 66 | time_to_next_reconnect = @last_down_at + options[:down_retry_delay] - Time.now 67 | return true unless time_to_next_reconnect.positive? 68 | 69 | Dalli.logger.debug do 70 | format('down_retry_delay not reached for %s (%