From 2ec17ce4b510e611498d9f3fbf24fa720e42e57e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9dric=20Boutillier?= <boutil@debian.org>
Date: Thu, 30 Nov 2023 14:34:18 +0100
Subject: [PATCH 1/4] Import Debian changes 3.0.6-2

---
 debian/changelog | 6 ++++++
 debian/control   | 3 ++-
 2 files changed, 8 insertions(+), 1 deletion(-)

diff --git a/debian/changelog b/debian/changelog
index 5ae480b..2e4438a 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+ruby-dalli (3.0.6-2) unstable; urgency=medium
+
+  * Depend explicitly on ruby-connection-pool (Closes: #1033310)
+
+ -- Cédric Boutillier <boutil@debian.org>  Thu, 30 Nov 2023 14:34:18 +0100
+
 ruby-dalli (3.0.6-1.1) unstable; urgency=medium
 
   * Non-maintainer upload.
diff --git a/debian/control b/debian/control
index e41a028..f33f1f3 100644
--- a/debian/control
+++ b/debian/control
@@ -23,7 +23,8 @@ Architecture: all
 XB-Ruby-Versions: ${ruby:Versions}
 Depends: ${misc:Depends},
          ${ruby:Depends},
-         ${shlibs:Depends}
+         ${shlibs:Depends},
+         ruby-connection-pool
 Suggests: ruby-kgio
 Description: memcached client library for Ruby
  Dalli is a high performance pure Ruby client for accessing memcached servers.
-- 
GitLab


From 80f3f41fe24ac8d2952e7bb07a71db7d1290a4a9 Mon Sep 17 00:00:00 2001
From: Apertis CI <devel@lists.apertis.org>
Date: Wed, 2 Apr 2025 10:30:16 +0000
Subject: [PATCH 2/4] Import Upstream version 3.2.8

---
 .github/dependabot.yml                        |   6 +
 .github/workflows/codeql-analysis.yml         |  12 +-
 .github/workflows/rubocop.yml                 |   4 +-
 .github/workflows/tests.yml                   |  20 +-
 .rubocop.yml                                  |   2 +-
 .rubocop_todo.yml                             |  22 +-
 History.md => CHANGELOG.md                    | 111 +++
 Gemfile                                       |  13 +-
 README.md                                     |  10 +-
 bin/console                                   |  15 +
 bin/setup                                     |   8 +
 dalli.gemspec                                 |  12 +-
 lib/dalli.rb                                  |  12 +-
 lib/dalli/client.rb                           | 444 ++++------
 lib/dalli/key_manager.rb                      |  14 +-
 lib/dalli/options.rb                          |   6 +-
 lib/dalli/pid_cache.rb                        |  40 +
 lib/dalli/pipelined_getter.rb                 | 177 ++++
 lib/dalli/protocol.rb                         |  11 +
 lib/dalli/protocol/base.rb                    | 250 ++++++
 lib/dalli/protocol/binary.rb                  | 525 ++----------
 .../protocol/binary/request_formatter.rb      |  20 +-
 lib/dalli/protocol/binary/response_header.rb  |  36 +
 .../protocol/binary/response_processor.rb     | 200 +++--
 .../protocol/binary/sasl_authentication.rb    |   4 +-
 lib/dalli/protocol/connection_manager.rb      | 255 ++++++
 lib/dalli/protocol/meta.rb                    | 178 +++++
 lib/dalli/protocol/meta/key_regularizer.rb    |  31 +
 lib/dalli/protocol/meta/request_formatter.rb  | 121 +++
 lib/dalli/protocol/meta/response_processor.rb | 211 +++++
 lib/dalli/protocol/response_buffer.rb         |  54 ++
 lib/dalli/protocol/server_config_parser.rb    |  16 +-
 lib/dalli/ring.rb                             |  12 +-
 lib/dalli/servers_arg_normalizer.rb           |   2 +-
 lib/dalli/socket.rb                           |  47 +-
 lib/dalli/version.rb                          |   2 +-
 lib/rack/session/dalli.rb                     | 168 ++--
 scripts/install_memcached.sh                  |  20 +-
 test/benchmark_test.rb                        |  23 +-
 test/helper.rb                                |  21 +-
 test/helpers/memcached.rb                     |  20 +-
 test/integration/test_authentication.rb       |  19 +
 test/integration/test_cas.rb                  | 346 ++++++++
 test/integration/test_compressor.rb           |  57 ++
 test/integration/test_concurrency.rb          |  55 ++
 test/integration/test_connection_pool.rb      |  21 +
 test/integration/test_encoding.rb             |  29 +
 test/integration/test_failover.rb             | 179 +++++
 test/integration/test_marshal.rb              |  40 +
 test/integration/test_memcached_admin.rb      |  68 ++
 test/integration/test_namespace_and_key.rb    |  96 +++
 test/integration/test_network.rb              | 364 +++++++++
 test/integration/test_operations.rb           | 379 +++++++++
 test/integration/test_pipelined_get.rb        | 107 +++
 test/integration/test_quiet.rb                | 283 +++++++
 test/integration/test_sasl.rb                 |  89 +++
 test/integration/test_serializer.rb           |  33 +
 test/integration/test_ttl.rb                  |  39 +
 test/protocol/meta/test_request_formatter.rb  | 245 ++++++
 .../test_binary.rb}                           |  33 +-
 test/protocol/test_server_config_parser.rb    |  83 +-
 test/protocol/test_value_compressor.rb        |  44 +-
 test/protocol/test_value_marshaller.rb        |  12 +-
 test/protocol/test_value_serializer.rb        |  27 +-
 test/test_cas_client.rb                       | 109 ---
 test/test_client_options.rb                   |  67 ++
 test/test_compressor.rb                       |  55 +-
 test/test_dalli.rb                            | 755 ------------------
 test/test_digest_class.rb                     |  11 +
 test/test_encoding.rb                         |  30 -
 test/test_failover.rb                         | 161 ----
 test/test_key_manager.rb                      |  66 +-
 test/test_network.rb                          |  68 --
 test/test_rack_session.rb                     |  66 +-
 test/test_ring.rb                             |  57 +-
 test/test_sasl.rb                             |  88 --
 test/test_serializer.rb                       |  28 -
 test/test_servers_arg_normalizer.rb           |   2 +
 test/utils/certificate_generator.rb           |   6 +-
 test/utils/memcached_manager.rb               |  34 +-
 80 files changed, 5021 insertions(+), 2385 deletions(-)
 create mode 100644 .github/dependabot.yml
 rename History.md => CHANGELOG.md (84%)
 create mode 100755 bin/console
 create mode 100755 bin/setup
 create mode 100644 lib/dalli/pid_cache.rb
 create mode 100644 lib/dalli/pipelined_getter.rb
 create mode 100644 lib/dalli/protocol/base.rb
 create mode 100644 lib/dalli/protocol/binary/response_header.rb
 create mode 100644 lib/dalli/protocol/connection_manager.rb
 create mode 100644 lib/dalli/protocol/meta.rb
 create mode 100644 lib/dalli/protocol/meta/key_regularizer.rb
 create mode 100644 lib/dalli/protocol/meta/request_formatter.rb
 create mode 100644 lib/dalli/protocol/meta/response_processor.rb
 create mode 100644 lib/dalli/protocol/response_buffer.rb
 create mode 100644 test/integration/test_authentication.rb
 create mode 100644 test/integration/test_cas.rb
 create mode 100644 test/integration/test_compressor.rb
 create mode 100644 test/integration/test_concurrency.rb
 create mode 100644 test/integration/test_connection_pool.rb
 create mode 100644 test/integration/test_encoding.rb
 create mode 100644 test/integration/test_failover.rb
 create mode 100644 test/integration/test_marshal.rb
 create mode 100644 test/integration/test_memcached_admin.rb
 create mode 100644 test/integration/test_namespace_and_key.rb
 create mode 100644 test/integration/test_network.rb
 create mode 100644 test/integration/test_operations.rb
 create mode 100644 test/integration/test_pipelined_get.rb
 create mode 100644 test/integration/test_quiet.rb
 create mode 100644 test/integration/test_sasl.rb
 create mode 100644 test/integration/test_serializer.rb
 create mode 100644 test/integration/test_ttl.rb
 create mode 100644 test/protocol/meta/test_request_formatter.rb
 rename test/{test_binary_protocol.rb => protocol/test_binary.rb} (83%)
 delete mode 100644 test/test_cas_client.rb
 create mode 100644 test/test_client_options.rb
 delete mode 100644 test/test_dalli.rb
 create mode 100644 test/test_digest_class.rb
 delete mode 100644 test/test_encoding.rb
 delete mode 100644 test/test_failover.rb
 delete mode 100644 test/test_network.rb
 delete mode 100644 test/test_sasl.rb
 delete mode 100644 test/test_serializer.rb

diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..5ace460
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,6 @@
+version: 2
+updates:
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "weekly"
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index fd14590..172ec82 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -13,10 +13,10 @@ name: "CodeQL"
 
 on:
   push:
-    branches: [ master ]
+    branches: [ main ]
   pull_request:
     # The branches below must be a subset of the branches above
-    branches: [ master ]
+    branches: [ main ]
   schedule:
     - cron: '22 14 * * 5'
 
@@ -36,11 +36,11 @@ jobs:
 
     steps:
     - name: Checkout repository
-      uses: actions/checkout@v2
+      uses: actions/checkout@v4
 
     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
-      uses: github/codeql-action/init@v1
+      uses: github/codeql-action/init@v3
       with:
         languages: ${{ matrix.language }}
         # If you wish to specify custom queries, you can do so here or in a config file.
@@ -51,7 +51,7 @@ jobs:
     # Autobuild attempts to build any compiled languages  (C/C++, C#, or Java).
     # If this step fails, then you should remove it and run the build manually (see below)
     - name: Autobuild
-      uses: github/codeql-action/autobuild@v1
+      uses: github/codeql-action/autobuild@v3
 
     # ℹ️ Command-line programs to run using the OS shell.
     # 📚 https://git.io/JvXDl
@@ -65,5 +65,5 @@ jobs:
     #   make release
 
     - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@v1
+      uses: github/codeql-action/analyze@v3
 
diff --git a/.github/workflows/rubocop.yml b/.github/workflows/rubocop.yml
index 9f0e5a3..7a731a9 100644
--- a/.github/workflows/rubocop.yml
+++ b/.github/workflows/rubocop.yml
@@ -7,11 +7,11 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v4
     - name: Set up Ruby
       uses: ruby/setup-ruby@v1
       with:
-        ruby-version: 2.5
+        ruby-version: 2.6
         bundler-cache: true # 'bundle install' and cache
     - name: Run RuboCop
       run: bundle exec rubocop --parallel
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 9d79476..83ae389 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -4,15 +4,25 @@ on: [push, pull_request]
 
 jobs:
   test:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
 
     strategy:
+      fail-fast: false
       matrix:
-        ruby-version: [2.5, 2.6, 2.7, '3.0', jruby-9.2, jruby-9.3]
-        memcached-version: ['1.5.22', '1.6.12']
+        ruby-version:
+          - head
+          - '3.3'
+          - '3.2'
+          - '3.1'
+          - '3.0'
+          - '2.7'
+          - '2.6'
+          - jruby-9.3
+          - jruby-9.4
+        memcached-version: ['1.5.22', '1.6.23']
 
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v4
     - name: Install Memcached ${{ matrix.memcached-version }}
       working-directory: scripts
       env:
@@ -27,3 +37,5 @@ jobs:
         bundler-cache: true # 'bundle install' and cache
     - name: Run tests
       run: bundle exec rake
+      env:
+        RUN_SASL_TESTS: 1
diff --git a/.rubocop.yml b/.rubocop.yml
index 7505262..2af66bc 100644
--- a/.rubocop.yml
+++ b/.rubocop.yml
@@ -7,7 +7,7 @@ require:
 
 AllCops:
   NewCops: enable
-  TargetRubyVersion: 2.5
+  TargetRubyVersion: 2.6
 
 Metrics/BlockLength:
   Max: 50
diff --git a/.rubocop_todo.yml b/.rubocop_todo.yml
index 0a85b7d..eb2d2a7 100644
--- a/.rubocop_todo.yml
+++ b/.rubocop_todo.yml
@@ -1,25 +1,29 @@
 # This configuration was generated by
 # `rubocop --auto-gen-config`
-# on 2021-11-08 17:19:45 UTC using RuboCop version 1.22.3.
+# on 2022-01-02 05:40:35 UTC using RuboCop version 1.24.1.
 # The point is for the user to remove these configuration records
 # one by one as the offenses are removed from the code base.
 # Note that changes in the inspected code, or installation of new
 # versions of RuboCop, may require this file to be generated again.
 
-# Offense count: 7
+# Offense count: 1
 # Configuration parameters: IgnoredMethods, CountRepeatedAttributes.
 Metrics/AbcSize:
-  Max: 52
+  Max: 19
 
-# Offense count: 2
+# Offense count: 8
 # Configuration parameters: CountComments, CountAsOne.
 Metrics/ClassLength:
-  Max: 406
+  Max: 195
 
-# Offense count: 11
+# Offense count: 4
 # Configuration parameters: CountComments, CountAsOne, ExcludedMethods, IgnoredMethods.
 Metrics/MethodLength:
   Exclude:
-    - 'lib/dalli/client.rb'
-    - 'lib/dalli/protocol/binary.rb'
-    - 'lib/rack/session/dalli.rb'
+    - 'lib/dalli/pipelined_getter.rb'
+    - 'lib/dalli/protocol/base.rb'
+
+# Offense count: 1
+# Configuration parameters: CountComments, CountAsOne.
+Metrics/ModuleLength:
+  Max: 108
diff --git a/History.md b/CHANGELOG.md
similarity index 84%
rename from History.md
rename to CHANGELOG.md
index 765279e..bf38f81 100644
--- a/History.md
+++ b/CHANGELOG.md
@@ -1,6 +1,114 @@
 Dalli Changelog
 =====================
 
+Unreleased
+==========
+
+3.2.8
+==========
+
+- Handle IO::TimeoutError when establishing connection (eugeneius)
+- Drop dependency on base64 gem (Earlopain)
+- Address incompatibility with resolv-replace (y9v)
+- Add rubygems.org metadata (m-nakamura145)
+
+3.2.7
+==========
+
+- Fix cascading error when there's an underlying network error in a pipelined get (eugeneius)
+- Ruby 3.4/head compatibility by adding base64 to gemspec (tagliala)
+- Add Ruby 3.3 to CI (m-nakamura145)
+- Use Socket's connect_timeout when available, and pass timeout to the socket's send and receive timeouts (mlarraz)
+
+3.2.6
+==========
+
+- Rescue IO::TimeoutError raised by Ruby since 3.2.0 on blocking reads/writes (skaes)
+- Fix rubydoc link (JuanitoFatas)
+
+3.2.5
+==========
+
+- Better handle memcached requests being interrupted by Thread#raise or Thread#kill (byroot)
+- Unexpected errors are no longer treated as `Dalli::NetworkError`, including errors raised by `Timeout.timeout` (byroot)
+
+3.2.4
+==========
+
+- Cache PID calls for performance since glibc no longer caches in recent versions (byroot)
+- Preallocate the read buffer in Socket#readfull (byroot)
+
+3.2.3
+==========
+
+- Sanitize CAS inputs to ensure additional commands are not passed to memcached (xhzeem / petergoldstein)
+- Sanitize input to flush command to ensure additional commands are not passed to memcached (xhzeem / petergoldstein)
+- Namespaces passed as procs are now evaluated every time, as opposed to just on initialization (nrw505)
+- Fix missing require of uri in ServerConfigParser (adam12)
+- Fix link to the CHANGELOG.md file in README.md (rud)
+
+3.2.2
+==========
+
+- Ensure apps are resilient against old session ids (kbrock)
+
+3.2.1
+==========
+
+- Fix null replacement bug on some SASL-authenticated services (veritas1)
+
+3.2.0
+==========
+
+- BREAKING CHANGE: Remove protocol_implementation client option (petergoldstein)
+- Add protocol option with meta implementation (petergoldstein)
+
+3.1.6
+==========
+
+- Fix bug with cas/cas! with "Not found" value (petergoldstein)
+- Add Ruby 3.1 to CI (petergoldstein)
+- Replace reject(&:nil?) with compact (petergoldstein)
+
+3.1.5
+==========
+
+- Fix bug with get_cas key with "Not found" value (petergoldstein)
+- Replace should return nil, not raise error, on miss (petergoldstein)
+
+3.1.4
+==========
+
+- Improve response parsing performance (byroot)
+- Reorganize binary protocol parsing a bit (petergoldstein)
+- Fix handling of non-ASCII keys in get_multi (petergoldstein)
+
+3.1.3
+==========
+
+- Restore falsey behavior on delete/delete_cas for nonexistent key (petergoldstein)
+
+3.1.2
+==========
+
+- Make quiet? / multi? public on Dalli::Protocol::Binary (petergoldstein)
+
+3.1.1
+==========
+
+- Add quiet support for incr, decr, append, prepend, and flush (petergoldstein)
+- Additional refactoring to allow reuse of connection behavior (petergoldstein)
+- Fix issue in flush such that it wasn't passing the delay argument to memcached (petergoldstein)
+
+3.1.0
+==========
+
+- BREAKING CHANGE: Update Rack::Session::Dalli to inherit from Abstract::PersistedSecure.  This will invalidate existing sessions (petergoldstein)
+- BREAKING CHANGE: Use of unsupported operations in a multi block now raise an error. (petergoldstein)
+- Extract PipelinedGetter from Dalli::Client (petergoldstein)
+- Fix SSL socket so that it works with pipelined gets (petergoldstein)
+- Additional refactoring to split classes (petergoldstein)
+
 3.0.6
 ==========
 
@@ -58,6 +166,9 @@ Dalli Changelog
   * The Rack session adapter has been refactored to remove support for thread-unsafe
     configurations. You will need to include the `connection_pool` gem in
     your Gemfile to ensure session operations are thread-safe.
+  * When using namespaces, the algorithm for calculating truncated keys was
+    changed.  Non-truncated keys and truncated keys for the non-namespace
+    case were left unchanged.
 
 - Raise NetworkError when multi response gets into corrupt state (mervync, #783)
 - Validate servers argument (semaperepelitsa, petergoldstein, #776)
diff --git a/Gemfile b/Gemfile
index e67f0dc..052aa37 100644
--- a/Gemfile
+++ b/Gemfile
@@ -4,13 +4,18 @@ source 'https://rubygems.org'
 
 gemspec
 
-group :test do
-  gem 'minitest'
-  gem 'rake'
+group :development, :test do
+  gem 'connection_pool'
+  gem 'minitest', '~> 5'
+  gem 'rack', '~> 2.0', '>= 2.2.0'
+  gem 'rake', '~> 13.0'
   gem 'rubocop'
   gem 'rubocop-minitest'
   gem 'rubocop-performance'
   gem 'rubocop-rake'
-  gem 'ruby-prof', platform: :mri
   gem 'simplecov'
 end
+
+group :test do
+  gem 'ruby-prof', platform: :mri
+end
diff --git a/README.md b/README.md
index 45f67c5..e29a239 100644
--- a/README.md
+++ b/README.md
@@ -23,11 +23,17 @@ The name is a variant of Salvador Dali for his famous painting [The Persistence
 * [Announcements](https://github.com/petergoldstein/dalli/discussions/categories/announcements) - Announcements of interest to the Dalli community will be posted here.
 * [Bug Reports](https://github.com/petergoldstein/dalli/issues) - If you discover a problem with Dalli, please submit a bug report in the tracker.
 * [Forum](https://github.com/petergoldstein/dalli/discussions/categories/q-a) - If you have questions about Dalli, please post them here.
-* [Client API](https://www.rubydoc.info/github/petergoldstein/dalli/master/Dalli/Client) - Ruby documentation for the `Dalli::Client` API
+* [Client API](https://www.rubydoc.info/gems/dalli) - Ruby documentation for the `Dalli::Client` API
+
+## Development
+
+After checking out the repo, run `bin/setup` to install dependencies. You can run `bin/console` for an interactive prompt that will allow you to experiment.
+
+To install this gem onto your local machine, run `bundle exec rake install`.
 
 ## Contributing
 
-If you have a fix you wish to provide, please fork the code, fix in your local project and then send a pull request on github.  Please ensure that you include a test which verifies your fix and update `History.md` with a one sentence description of your fix so you get credit as a contributor.
+If you have a fix you wish to provide, please fork the code, fix it in your local project and then send a pull request on GitHub.  Please ensure that you include a test which verifies your fix and update the [changelog](CHANGELOG.md) with a one sentence description of your fix so you get credit as a contributor.
 
 ## Appreciation
 
diff --git a/bin/console b/bin/console
new file mode 100755
index 0000000..66884bf
--- /dev/null
+++ b/bin/console
@@ -0,0 +1,15 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require 'bundler/setup'
+require 'dalli'
+
+# You can add fixtures and/or initialization code here to make experimenting
+# with your gem easier. You can also use a different console, if you like.
+
+# (If you use this, don't forget to add pry to your Gemfile!)
+# require "pry"
+# Pry.start
+
+require 'irb'
+IRB.start(__FILE__)
diff --git a/bin/setup b/bin/setup
new file mode 100755
index 0000000..dce67d8
--- /dev/null
+++ b/bin/setup
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+set -euo pipefail
+IFS=$'\n\t'
+set -vx
+
+bundle install
+
+# Do any other automated setup that you need to do here
diff --git a/dalli.gemspec b/dalli.gemspec
index a4fe17a..110372c 100644
--- a/dalli.gemspec
+++ b/dalli.gemspec
@@ -13,19 +13,15 @@ Gem::Specification.new do |s|
   s.files = Dir.glob('lib/**/*') + [
     'LICENSE',
     'README.md',
-    'History.md',
+    'CHANGELOG.md',
     'Gemfile'
   ]
   s.homepage = 'https://github.com/petergoldstein/dalli'
-  s.required_ruby_version = '>= 2.5'
+  s.required_ruby_version = '>= 2.6'
 
-  s.add_development_dependency 'connection_pool'
-  s.add_development_dependency 'rack'
-  s.add_development_dependency 'rubocop'
-  s.add_development_dependency 'rubocop-minitest'
-  s.add_development_dependency 'rubocop-performance'
-  s.add_development_dependency 'rubocop-rake'
   s.metadata = {
+    'bug_tracker_uri' => 'https://github.com/petergoldstein/dalli/issues',
+    'changelog_uri' => 'https://github.com/petergoldstein/dalli/blob/main/CHANGELOG.md',
     'rubygems_mfa_required' => 'true'
   }
 end
diff --git a/lib/dalli.rb b/lib/dalli.rb
index 357a64e..ad05e3b 100644
--- a/lib/dalli.rb
+++ b/lib/dalli.rb
@@ -24,12 +24,17 @@ module Dalli
   # payload too big for memcached
   class ValueOverMaxSize < DalliError; end
 
+  # operation is not permitted in a multi block
+  class NotPermittedMultiOpError < DalliError; end
+
   # Implements the NullObject pattern to store an application-defined value for 'Key not found' responses.
   class NilObject; end # rubocop:disable Lint/EmptyClass
   NOT_FOUND = NilObject.new
 
+  QUIET = :dalli_multi
+
   def self.logger
-    @logger ||= (rails_logger || default_logger)
+    @logger ||= rails_logger || default_logger
   end
 
   def self.rails_logger
@@ -54,9 +59,14 @@ require_relative 'dalli/version'
 require_relative 'dalli/compressor'
 require_relative 'dalli/client'
 require_relative 'dalli/key_manager'
+require_relative 'dalli/pipelined_getter'
 require_relative 'dalli/ring'
 require_relative 'dalli/protocol'
+require_relative 'dalli/protocol/base'
 require_relative 'dalli/protocol/binary'
+require_relative 'dalli/protocol/connection_manager'
+require_relative 'dalli/protocol/meta'
+require_relative 'dalli/protocol/response_buffer'
 require_relative 'dalli/protocol/server_config_parser'
 require_relative 'dalli/protocol/ttl_sanitizer'
 require_relative 'dalli/protocol/value_compressor'
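
For illustration, a minimal sketch of the new Dalli::NotPermittedMultiOpError being raised when an unsupported operation is used inside a quiet block (the server address and keys are made up):

    require 'dalli'

    client = Dalli::Client.new('localhost:11211')

    begin
      client.quiet do
        client.set('k', 'v')   # storage operations are quiet-capable
        client.get('k')        # reads are not allowed inside a quiet block
      end
    rescue Dalli::NotPermittedMultiOpError => e
      warn e.message           # "The operation get is not allowed in a quiet block."
    end
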
diff --git a/lib/dalli/client.rb b/lib/dalli/client.rb
index 1e90e09..a590e31 100644
--- a/lib/dalli/client.rb
+++ b/lib/dalli/client.rb
@@ -43,13 +43,13 @@ module Dalli
     #                 #fetch operations.
     # - :digest_class - defaults to Digest::MD5, allows you to pass in an object that responds to the hexdigest method,
     #                   useful for injecting a FIPS compliant hash object.
-    # - :protocol_implementation - defaults to Dalli::Protocol::Binary which uses the binary protocol. Allows you to
-    #                              pass an alternative implementation using another protocol.
+    # - :protocol - one of either :binary or :meta, defaulting to :binary.  This sets the protocol that Dalli uses
+    #               to communicate with memcached.
     #
     def initialize(servers = nil, options = {})
-      @servers = ::Dalli::ServersArgNormalizer.normalize_servers(servers)
+      @normalized_servers = ::Dalli::ServersArgNormalizer.normalize_servers(servers)
       @options = normalize_options(options)
-      @key_manager = ::Dalli::KeyManager.new(options)
+      @key_manager = ::Dalli::KeyManager.new(@options)
       @ring = nil
     end
 
@@ -58,24 +58,37 @@ module Dalli
     #
 
     ##
-    # Turn on quiet aka noreply support.
-    # All relevant operations within this block will be effectively
-    # pipelined as Dalli will use 'quiet' operations where possible.
-    # Currently supports the set, add, replace and delete operations.
-    def multi
-      old = Thread.current[:dalli_multi]
-      Thread.current[:dalli_multi] = true
-      yield
-    ensure
-      @ring&.flush_multi_responses
-      Thread.current[:dalli_multi] = old
+    # Get the value associated with the key.
+    # If a value is not found, then +nil+ is returned.
+    def get(key, req_options = nil)
+      perform(:get, key, req_options)
     end
 
     ##
-    # Get the value associated with the key.
+    # Gat (get and touch) fetches an item and simultaneously updates its expiration time.
+    #
     # If a value is not found, then +nil+ is returned.
-    def get(key, options = nil)
-      perform(:get, key, options)
+    def gat(key, ttl = nil)
+      perform(:gat, key, ttl_or_default(ttl))
+    end
+
+    ##
+    # Touch updates expiration time for a given key.
+    #
+    # Returns true if key exists, otherwise nil.
+    def touch(key, ttl = nil)
+      resp = perform(:touch, key, ttl_or_default(ttl))
+      resp.nil? ? nil : true
+    end
+
+    ##
+    # Get the value and CAS ID associated with the key.  If a block is provided,
+    # value and CAS will be passed to the block.
+    def get_cas(key)
+      (value, cas) = perform(:cas, key)
+      return [value, cas] unless block_given?
+
+      yield value, cas
     end
 
     ##
@@ -89,15 +102,29 @@ module Dalli
       return {} if keys.empty?
 
       if block_given?
-        get_multi_yielder(keys) { |k, data| yield k, data.first }
+        pipelined_getter.process(keys) { |k, data| yield k, data.first }
       else
         {}.tap do |hash|
-          get_multi_yielder(keys) { |k, data| hash[k] = data.first }
+          pipelined_getter.process(keys) { |k, data| hash[k] = data.first }
         end
       end
     end
 
-    CACHE_NILS = { cache_nils: true }.freeze
+    ##
+    # Fetch multiple keys efficiently, including available metadata such as CAS.
+    # If a block is given, yields key/data pairs one at a time.  Data is an array:
+    # [value, cas_id]
+    # If no block is given, returns a hash of
+    #   { 'key' => [value, cas_id] }
+    def get_multi_cas(*keys)
+      if block_given?
+        pipelined_getter.process(keys) { |*args| yield(*args) }
+      else
+        {}.tap do |hash|
+          pipelined_getter.process(keys) { |k, data| hash[k] = data }
+        end
+      end
+    end
 
     # Fetch the value associated with the key.
     # If a value is found, then it is returned.
@@ -110,19 +137,11 @@ module Dalli
     def fetch(key, ttl = nil, req_options = nil)
       req_options = req_options.nil? ? CACHE_NILS : req_options.merge(CACHE_NILS) if cache_nils
       val = get(key, req_options)
-      if not_found?(val) && block_given?
-        val = yield
-        add(key, val, ttl_or_default(ttl), req_options)
-      end
-      val
-    end
+      return val unless block_given? && not_found?(val)
 
-    def not_found?(val)
-      cache_nils ? val == ::Dalli::NOT_FOUND : val.nil?
-    end
-
-    def cache_nils
-      @options[:cache_nils]
+      new_val = yield
+      add(key, new_val, ttl_or_default(ttl), req_options)
+      new_val
     end
 
     ##
@@ -136,8 +155,8 @@ module Dalli
     # - nil if the key did not exist.
     # - false if the value was changed by someone else.
     # - true if the value was successfully updated.
-    def cas(key, ttl = nil, options = nil, &block)
-      cas_core(key, false, ttl, options, &block)
+    def cas(key, ttl = nil, req_options = nil, &block)
+      cas_core(key, false, ttl, req_options, &block)
     end
 
     ##
@@ -147,30 +166,78 @@ module Dalli
     # Returns:
     # - false if the value was changed by someone else.
     # - true if the value was successfully updated.
-    def cas!(key, ttl = nil, options = nil, &block)
-      cas_core(key, true, ttl, options, &block)
+    def cas!(key, ttl = nil, req_options = nil, &block)
+      cas_core(key, true, ttl, req_options, &block)
     end
 
-    def set(key, value, ttl = nil, options = nil)
-      perform(:set, key, value, ttl_or_default(ttl), 0, options)
+    ##
+    # Turn on quiet aka noreply support for a number of
+    # memcached operations.
+    #
+    # All relevant operations within this block will be effectively
+    # pipelined as Dalli will use 'quiet' versions.  The invoked methods
+    # will all return nil, rather than their usual response.  Method
+    # latency will be substantially lower, as the caller will not be
+    # blocking on responses.
+    #
+    # Currently supports storage (set, add, replace, append, prepend),
+    # arithmetic (incr, decr), flush and delete operations.  Use of
+    # unsupported operations inside a block will raise an error.
+    #
+    # Any error replies will be discarded at the end of the block, and
+    # Dalli client methods invoked inside the block will not
+    # have return values.
+    def quiet
+      old = Thread.current[::Dalli::QUIET]
+      Thread.current[::Dalli::QUIET] = true
+      yield
+    ensure
+      @ring&.pipeline_consume_and_ignore_responses
+      Thread.current[::Dalli::QUIET] = old
+    end
+    alias multi quiet
+
+    def set(key, value, ttl = nil, req_options = nil)
+      set_cas(key, value, 0, ttl, req_options)
+    end
+
+    ##
+    # Set the key-value pair, verifying existing CAS.
+    # Returns the resulting CAS value if succeeded, and falsy otherwise.
+    def set_cas(key, value, cas, ttl = nil, req_options = nil)
+      perform(:set, key, value, ttl_or_default(ttl), cas, req_options)
     end
 
     ##
     # Conditionally add a key/value pair, if the key does not already exist
     # on the server.  Returns truthy if the operation succeeded.
-    def add(key, value, ttl = nil, options = nil)
-      perform(:add, key, value, ttl_or_default(ttl), options)
+    def add(key, value, ttl = nil, req_options = nil)
+      perform(:add, key, value, ttl_or_default(ttl), req_options)
     end
 
     ##
     # Conditionally add a key/value pair, only if the key already exists
     # on the server.  Returns truthy if the operation succeeded.
-    def replace(key, value, ttl = nil, options = nil)
-      perform(:replace, key, value, ttl_or_default(ttl), 0, options)
+    def replace(key, value, ttl = nil, req_options = nil)
+      replace_cas(key, value, 0, ttl, req_options)
+    end
+
+    ##
+    # Conditionally add a key/value pair, verifying existing CAS, only if the
+    # key already exists on the server.  Returns the new CAS value if the
+    # operation succeeded, or falsy otherwise.
+    def replace_cas(key, value, cas, ttl = nil, req_options = nil)
+      perform(:replace, key, value, ttl_or_default(ttl), cas, req_options)
+    end
+
+    # Delete a key/value pair, verifying existing CAS.
+    # Returns true if succeeded, and falsy otherwise.
+    def delete_cas(key, cas = 0)
+      perform(:delete, key, cas)
     end
 
     def delete(key)
-      perform(:delete, key, 0)
+      delete_cas(key, 0)
     end
 
     ##
@@ -187,13 +254,6 @@ module Dalli
       perform(:prepend, key, value.to_s)
     end
 
-    def flush(delay = 0)
-      time = -delay
-      ring.servers.map { |s| s.request(:flush, time += delay) }
-    end
-
-    alias flush_all flush
-
     ##
     # Incr adds the given amount to the counter on the memcached server.
     # Amt must be a positive integer value.
@@ -205,8 +265,10 @@ module Dalli
     # Note that the ttl will only apply if the counter does not already
     # exist.  To increase an existing counter and update its TTL, use
     # #cas.
+    #
+    # If the value already exists, it must have been set with raw: true
     def incr(key, amt = 1, ttl = nil, default = nil)
-      raise ArgumentError, "Positive values only: #{amt}" if amt.negative?
+      check_positive!(amt)
 
       perform(:incr, key, amt.to_i, ttl_or_default(ttl), default)
     end
@@ -225,35 +287,31 @@ module Dalli
     # Note that the ttl will only apply if the counter does not already
     # exist.  To decrease an existing counter and update its TTL, use
     # #cas.
+    #
+    # If the value already exists, it must have been set with raw: true
     def decr(key, amt = 1, ttl = nil, default = nil)
-      raise ArgumentError, "Positive values only: #{amt}" if amt.negative?
+      check_positive!(amt)
 
       perform(:decr, key, amt.to_i, ttl_or_default(ttl), default)
     end
 
     ##
-    # Touch updates expiration time for a given key.
-    #
-    # Returns true if key exists, otherwise nil.
-    def touch(key, ttl = nil)
-      resp = perform(:touch, key, ttl_or_default(ttl))
-      resp.nil? ? nil : true
-    end
-
+    # Flush the memcached server, at 'delay' seconds in the future.
+    # Delay defaults to zero seconds, which means an immediate flush.
     ##
-    # Gat (get and touch) fetch an item and simultaneously update its expiration time.
-    #
-    # If a value is not found, then +nil+ is returned.
-    def gat(key, ttl = nil)
-      perform(:gat, key, ttl_or_default(ttl))
+    def flush(delay = 0)
+      ring.servers.map { |s| s.request(:flush, delay) }
     end
+    alias flush_all flush
+
+    ALLOWED_STAT_KEYS = %i[items slabs settings].freeze
 
     ##
     # Collect the stats for each server.
     # You can optionally pass a type including :items, :slabs or :settings to get specific stats
     # Returns a hash like { 'hostname:port' => { 'stat1' => 'value1', ... }, 'hostname2:port' => { ... } }
     def stats(type = nil)
-      type = nil unless [nil, :items, :slabs, :settings].include? type
+      type = nil unless ALLOWED_STAT_KEYS.include? type
       values = {}
       ring.servers.each do |server|
         values[server.name.to_s] = server.alive? ? server.request(:stats, type.to_s) : nil
@@ -269,12 +327,6 @@ module Dalli
       end
     end
 
-    ##
-    ## Make sure memcache servers are alive, or raise an Dalli::RingError
-    def alive!
-      ring.server_for_key('')
-    end
-
     ##
     ## Version of the memcache servers.
     def version
@@ -286,68 +338,30 @@ module Dalli
     end
 
     ##
-    # Get the value and CAS ID associated with the key.  If a block is provided,
-    # value and CAS will be passed to the block.
-    def get_cas(key)
-      (value, cas) = perform(:cas, key)
-      value = nil if !value || value == 'Not found'
-      if block_given?
-        yield value, cas
-      else
-        [value, cas]
-      end
-    end
-
-    ##
-    # Fetch multiple keys efficiently, including available metadata such as CAS.
-    # If a block is given, yields key/data pairs one a time.  Data is an array:
-    # [value, cas_id]
-    # If no block is given, returns a hash of
-    #   { 'key' => [value, cas_id] }
-    def get_multi_cas(*keys)
-      if block_given?
-        get_multi_yielder(keys) { |*args| yield(*args) }
-      else
-        {}.tap do |hash|
-          get_multi_yielder(keys) { |k, data| hash[k] = data }
-        end
-      end
-    end
-
-    ##
-    # Set the key-value pair, verifying existing CAS.
-    # Returns the resulting CAS value if succeeded, and falsy otherwise.
-    def set_cas(key, value, cas, ttl = nil, options = nil)
-      ttl ||= @options[:expires_in].to_i
-      perform(:set, key, value, ttl, cas, options)
-    end
-
-    ##
-    # Conditionally add a key/value pair, verifying existing CAS, only if the
-    # key already exists on the server.  Returns the new CAS value if the
-    # operation succeeded, or falsy otherwise.
-    def replace_cas(key, value, cas, ttl = nil, options = nil)
-      ttl ||= @options[:expires_in].to_i
-      perform(:replace, key, value, ttl, cas, options)
-    end
-
-    # Delete a key/value pair, verifying existing CAS.
-    # Returns true if succeeded, and falsy otherwise.
-    def delete_cas(key, cas = 0)
-      perform(:delete, key, cas)
+    ## Make sure memcache servers are alive, or raise a Dalli::RingError
+    def alive!
+      ring.server_for_key('')
     end
 
     ##
     # Close our connection to each server.
     # If you perform another operation after this, the connections will be re-established.
     def close
-      return unless @ring
-
-      @ring.servers.each(&:close)
+      @ring&.close
       @ring = nil
     end
     alias reset close
 
+    CACHE_NILS = { cache_nils: true }.freeze
+
+    def not_found?(val)
+      cache_nils ? val == ::Dalli::NOT_FOUND : val.nil?
+    end
+
+    def cache_nils
+      @options[:cache_nils]
+    end
+
     # Stub method so a bare Dalli client can pretend to be a connection pool.
     def with
       yield self
@@ -355,15 +369,22 @@ module Dalli
 
     private
 
-    def cas_core(key, always_set, ttl = nil, options = nil)
+    def check_positive!(amt)
+      raise ArgumentError, "Positive values only: #{amt}" if amt.negative?
+    end
+
+    def cas_core(key, always_set, ttl = nil, req_options = nil)
       (value, cas) = perform(:cas, key)
-      value = nil if !value || value == 'Not found'
       return if value.nil? && !always_set
 
       newvalue = yield(value)
-      perform(:set, key, newvalue, ttl_or_default(ttl), cas, options)
+      perform(:set, key, newvalue, ttl_or_default(ttl), cas, req_options)
     end
 
+    ##
+    # Uses the argument TTL or the client-wide default.  Ensures
+    # that the value is an integer.
+    ##
     def ttl_or_default(ttl)
       (ttl || @options[:expires_in]).to_i
     rescue NoMethodError
@@ -371,20 +392,28 @@ module Dalli
     end
 
     def ring
-      # TODO: This server initialization should probably be pushed down
-      # to the Ring
-      @ring ||= Dalli::Ring.new(
-        @servers.map do |s|
-          protocol_implementation.new(s, @options)
-        end, @options
-      )
+      @ring ||= Dalli::Ring.new(@normalized_servers, protocol_implementation, @options)
     end
 
     def protocol_implementation
-      @protocol_implementation ||= @options.fetch(:protocol_implementation, Dalli::Protocol::Binary)
+      @protocol_implementation ||= case @options[:protocol]&.to_s
+                                   when 'meta'
+                                     Dalli::Protocol::Meta
+                                   else
+                                     Dalli::Protocol::Binary
+                                   end
     end
 
-    # Chokepoint method for instrumentation
+    ##
+    # Chokepoint method for memcached methods with a key argument.
+    # Validates the key, resolves the key to the appropriate server
+    # instance, and invokes the memcached method on the appropriate
+    # server.
+    #
+    # This method also forces retries on network errors - when
+    # a particular memcached instance becomes unreachable, or the
+    # operation times out.
+    ##
     def perform(*all_args)
       return yield if block_given?
 
@@ -402,145 +431,14 @@ module Dalli
     end
 
     def normalize_options(opts)
-      begin
-        opts[:expires_in] = opts[:expires_in].to_i if opts[:expires_in]
-      rescue NoMethodError
-        raise ArgumentError, "cannot convert :expires_in => #{opts[:expires_in].inspect} to an integer"
-      end
+      opts[:expires_in] = opts[:expires_in].to_i if opts[:expires_in]
       opts
+    rescue NoMethodError
+      raise ArgumentError, "cannot convert :expires_in => #{opts[:expires_in].inspect} to an integer"
     end
 
-    # TODO: Look at extracting below into separate MultiYielder class
-
-    ##
-    # Yields, one at a time, keys and their values+attributes.
-    #
-    def get_multi_yielder(keys, &block)
-      return {} if keys.empty?
-
-      ring.lock do
-        groups = groups_for_keys(keys)
-        if (unfound_keys = groups.delete(nil))
-          Dalli.logger.debug do
-            "unable to get keys for #{unfound_keys.length} keys "\
-              'because no matching server was found'
-          end
-        end
-        make_multi_get_requests(groups)
-
-        servers = groups.keys
-        return if servers.empty?
-
-        # TODO: How does this exit on a NetworkError
-        servers = perform_multi_response_start(servers)
-
-        timeout = servers.first.options[:socket_timeout]
-        start_time = Time.now
-        loop do
-          # remove any dead servers
-          # TODO: Is this well behaved in a multi-threaded environment?
-          # Accessing the server socket like this seems problematic
-          servers.delete_if { |s| s.sock.nil? }
-          break if servers.empty?
-
-          servers = multi_yielder_loop(servers, start_time, timeout, &block)
-        end
-      end
-    rescue NetworkError => e
-      Dalli.logger.debug { e.inspect }
-      Dalli.logger.debug { 'retrying multi yielder because of timeout' }
-      retry
-    end
-
-    def make_multi_get_requests(groups)
-      groups.each do |server, keys_for_server|
-        server.request(:send_multiget, keys_for_server)
-      rescue DalliError, NetworkError => e
-        Dalli.logger.debug { e.inspect }
-        Dalli.logger.debug { "unable to get keys for server #{server.name}" }
-      end
-    end
-
-    # raises Dalli::NetworkError
-    def perform_multi_response_start(servers)
-      deleted = []
-
-      servers.each do |server|
-        next unless server.alive?
-
-        begin
-          server.multi_response_start
-        rescue Dalli::NetworkError
-          abort_multi_response(servers)
-          raise
-        rescue Dalli::DalliError => e
-          Dalli.logger.debug { e.inspect }
-          Dalli.logger.debug { 'results from this server will be missing' }
-          deleted.append(server)
-        end
-      end
-
-      servers.delete_if { |server| deleted.include?(server) }
-    end
-
-    # Swallows Dalli::NetworkError
-    def abort_multi_response(servers)
-      servers.each(&:multi_response_abort)
-    end
-
-    def multi_yielder_loop(servers, start_time, timeout, &block)
-      time_left = remaining_time(start_time, timeout)
-      readable_servers = servers_with_data(servers, time_left)
-      if readable_servers.empty?
-        abort_multi_connections_w_timeout(servers)
-        return readable_servers
-      end
-
-      readable_servers.each do |server|
-        servers.delete(server) if respond_to_readable_server(server, &block)
-      end
-      servers
-    rescue NetworkError
-      abort_multi_response(servers)
-      raise
-    end
-
-    def remaining_time(start, timeout)
-      elapsed = Time.now - start
-      return 0 if elapsed > timeout
-
-      timeout - elapsed
-    end
-
-    # Swallows Dalli::NetworkError
-    def abort_multi_connections_w_timeout(servers)
-      abort_multi_response(servers)
-      servers.each do |server|
-        Dalli.logger.debug { "memcached at #{server.name} did not response within timeout" }
-      end
-
-      true # Required to simplify caller
-    end
-
-    def respond_to_readable_server(server)
-      server.multi_response_nonblock.each_pair do |key, value_list|
-        yield @key_manager.key_without_namespace(key), value_list
-      end
-
-      server.multi_response_completed?
-    end
-
-    def servers_with_data(servers, timeout)
-      readable, = IO.select(servers.map(&:sock), nil, nil, timeout)
-      return [] if readable.nil?
-
-      readable.map(&:server)
-    end
-
-    def groups_for_keys(*keys)
-      keys.flatten!
-      keys.map! { |a| @key_manager.validate_key(a.to_s) }
-      ring.keys_grouped_by_server(keys)
+    def pipelined_getter
+      PipelinedGetter.new(ring, @key_manager)
     end
   end
 end
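
For illustration, a brief usage sketch of the reworked client API above, assuming a local memcached instance; the keys, values and namespace are made up:

    require 'dalli'

    # Select the meta protocol added in 3.2.0; :binary remains the default.
    client = Dalli::Client.new('localhost:11211', protocol: :meta,
                               namespace: 'app', expires_in: 300)

    client.set('greeting', 'hello')
    client.get('greeting')                       # => "hello"

    # quiet (noreply) mode: supported operations are pipelined and return nil.
    client.quiet do
      client.set('a', 1)
      client.delete('b')
    end

    # CAS round trip: read the value and its CAS token, then update conditionally.
    value, cas = client.get_cas('greeting')
    client.set_cas('greeting', "#{value}!", cas)
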
diff --git a/lib/dalli/key_manager.rb b/lib/dalli/key_manager.rb
index 8376056..509d8f0 100644
--- a/lib/dalli/key_manager.rb
+++ b/lib/dalli/key_manager.rb
@@ -61,7 +61,7 @@ module Dalli
     def key_with_namespace(key)
       return key if namespace.nil?
 
-      "#{namespace}#{NAMESPACE_SEPARATOR}#{key}"
+      "#{evaluate_namespace}#{NAMESPACE_SEPARATOR}#{key}"
     end
 
     def key_without_namespace(key)
@@ -75,6 +75,8 @@ module Dalli
     end
 
     def namespace_regexp
+      return /\A#{Regexp.escape(evaluate_namespace)}:/ if namespace.is_a?(Proc)
+
       @namespace_regexp ||= /\A#{Regexp.escape(namespace)}:/.freeze unless namespace.nil?
     end
 
@@ -87,9 +89,15 @@ module Dalli
     def namespace_from_options
       raw_namespace = @key_options[:namespace]
       return nil unless raw_namespace
-      return raw_namespace.call.to_s if raw_namespace.is_a?(Proc)
+      return raw_namespace.to_s unless raw_namespace.is_a?(Proc)
+
+      raw_namespace
+    end
+
+    def evaluate_namespace
+      return namespace.call.to_s if namespace.is_a?(Proc)
 
-      raw_namespace.to_s
+      namespace
     end
 
     ##
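
For illustration, a sketch of the proc-namespace behaviour above, where the namespace block is re-evaluated on every key operation (the per-tenant lookup is made up):

    require 'dalli'

    current_tenant = 'tenant_a'
    client = Dalli::Client.new('localhost:11211', namespace: -> { current_tenant })

    client.set('counter', 1)   # stored under "tenant_a:counter"
    current_tenant = 'tenant_b'
    client.set('counter', 2)   # stored under "tenant_b:counter"
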
diff --git a/lib/dalli/options.rb b/lib/dalli/options.rb
index 01fcad1..a48cd4b 100644
--- a/lib/dalli/options.rb
+++ b/lib/dalli/options.rb
@@ -31,19 +31,19 @@ module Dalli
       end
     end
 
-    def multi_response_start
+    def pipeline_response_setup
       @lock.synchronize do
         super
       end
     end
 
-    def multi_response_nonblock
+    def pipeline_next_responses
       @lock.synchronize do
         super
       end
     end
 
-    def multi_response_abort
+    def pipeline_abort
       @lock.synchronize do
         super
       end
diff --git a/lib/dalli/pid_cache.rb b/lib/dalli/pid_cache.rb
new file mode 100644
index 0000000..ebff40b
--- /dev/null
+++ b/lib/dalli/pid_cache.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+module Dalli
+  ##
+  # Dalli::PIDCache is a wrapper class for PID checking to avoid system calls when checking the PID.
+  ##
+  module PIDCache
+    if !Process.respond_to?(:fork) # JRuby or TruffleRuby
+      @pid = Process.pid
+      singleton_class.attr_reader(:pid)
+    elsif Process.respond_to?(:_fork) # Ruby 3.1+
+      class << self
+        attr_reader :pid
+
+        def update!
+          @pid = Process.pid
+        end
+      end
+      update!
+
+      ##
+      # Dalli::PIDCache::CoreExt hooks into Process to be able to reset the PID cache after fork
+      ##
+      module CoreExt
+        def _fork
+          child_pid = super
+          PIDCache.update! if child_pid.zero?
+          child_pid
+        end
+      end
+      Process.singleton_class.prepend(CoreExt)
+    else # Ruby 3.0 or older
+      class << self
+        def pid
+          Process.pid
+        end
+      end
+    end
+  end
+end
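
For illustration, a sketch of how the cached PID is refreshed across forks on Ruby 3.1+ (assumes a fork-capable platform):

    require 'dalli'

    parent_pid = Dalli::PIDCache.pid            # read from the cache, no getpid syscall

    child = fork do
      # The prepended _fork hook refreshed the cache in the child process.
      puts Dalli::PIDCache.pid == Process.pid   # => true
    end
    Process.wait(child)

    puts Dalli::PIDCache.pid == parent_pid      # => true in the parent
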
diff --git a/lib/dalli/pipelined_getter.rb b/lib/dalli/pipelined_getter.rb
new file mode 100644
index 0000000..5fbb8bb
--- /dev/null
+++ b/lib/dalli/pipelined_getter.rb
@@ -0,0 +1,177 @@
+# frozen_string_literal: true
+
+module Dalli
+  ##
+  # Contains logic for the pipelined gets implemented by the client.
+  ##
+  class PipelinedGetter
+    def initialize(ring, key_manager)
+      @ring = ring
+      @key_manager = key_manager
+    end
+
+    ##
+    # Yields, one at a time, keys and their values+attributes.
+    #
+    def process(keys, &block)
+      return {} if keys.empty?
+
+      @ring.lock do
+        servers = setup_requests(keys)
+        start_time = Time.now
+        servers = fetch_responses(servers, start_time, @ring.socket_timeout, &block) until servers.empty?
+      end
+    rescue NetworkError => e
+      Dalli.logger.debug { e.inspect }
+      Dalli.logger.debug { 'retrying pipelined gets because of timeout' }
+      retry
+    end
+
+    def setup_requests(keys)
+      groups = groups_for_keys(keys)
+      make_getkq_requests(groups)
+
+      # TODO: How does this exit on a NetworkError
+      finish_queries(groups.keys)
+    end
+
+    ##
+    # Loop through the server-grouped sets of keys, writing
+    # the corresponding getkq requests to the appropriate servers
+    #
+    # It's worth noting that we could potentially reduce bytes
+    # on the wire by switching from getkq to getq, and using
+    # the opaque value to match requests to responses.
+    ##
+    def make_getkq_requests(groups)
+      groups.each do |server, keys_for_server|
+        server.request(:pipelined_get, keys_for_server)
+      rescue DalliError, NetworkError => e
+        Dalli.logger.debug { e.inspect }
+        Dalli.logger.debug { "unable to get keys for server #{server.name}" }
+      end
+    end
+
+    ##
+    # This loops through the servers that have keys in
+    # our set, sending the noop to terminate the set of queries.
+    ##
+    def finish_queries(servers)
+      deleted = []
+
+      servers.each do |server|
+        next unless server.connected?
+
+        begin
+          finish_query_for_server(server)
+        rescue Dalli::NetworkError
+          raise
+        rescue Dalli::DalliError
+          deleted.append(server)
+        end
+      end
+
+      servers.delete_if { |server| deleted.include?(server) }
+    rescue Dalli::NetworkError
+      abort_without_timeout(servers)
+      raise
+    end
+
+    def finish_query_for_server(server)
+      server.pipeline_response_setup
+    rescue Dalli::NetworkError
+      raise
+    rescue Dalli::DalliError => e
+      Dalli.logger.debug { e.inspect }
+      Dalli.logger.debug { "Results from server: #{server.name} will be missing from the results" }
+      raise
+    end
+
+    # Swallows Dalli::NetworkError
+    def abort_without_timeout(servers)
+      servers.each(&:pipeline_abort)
+    end
+
+    def fetch_responses(servers, start_time, timeout, &block)
+      # Remove any servers which are not connected
+      servers.delete_if { |s| !s.connected? }
+      return [] if servers.empty?
+
+      time_left = remaining_time(start_time, timeout)
+      readable_servers = servers_with_response(servers, time_left)
+      if readable_servers.empty?
+        abort_with_timeout(servers)
+        return []
+      end
+
+      # Loop through the servers with responses, and
+      # delete any from our list that are finished
+      readable_servers.each do |server|
+        servers.delete(server) if process_server(server, &block)
+      end
+      servers
+    rescue NetworkError
+      # Abort and raise if we encountered a network error.  This triggers
+      # a retry at the top level.
+      abort_without_timeout(servers)
+      raise
+    end
+
+    def remaining_time(start, timeout)
+      elapsed = Time.now - start
+      return 0 if elapsed > timeout
+
+      timeout - elapsed
+    end
+
+    # Swallows Dalli::NetworkError
+    def abort_with_timeout(servers)
+      abort_without_timeout(servers)
+      servers.each do |server|
+        Dalli.logger.debug { "memcached at #{server.name} did not respond within timeout" }
+      end
+
+      true # Required to simplify caller
+    end
+
+    # Processes responses from a server.  Returns true if there are no
+    # additional responses from this server.
+    def process_server(server)
+      server.pipeline_next_responses.each_pair do |key, value_list|
+        yield @key_manager.key_without_namespace(key), value_list
+      end
+
+      server.pipeline_complete?
+    end
+
+    def servers_with_response(servers, timeout)
+      return [] if servers.empty?
+
+      # TODO: - This is a bit challenging.  Essentially the PipelinedGetter
+      # is a reactor, but without the benefit of a Fiber or separate thread.
+      # My suspicion is that we may want to try and push this down into the
+      # individual servers, but I'm not sure.  For now, we keep the
+      # mapping between the alerted object (the socket) and the
+      # corresponding server here.
+      server_map = servers.each_with_object({}) { |s, h| h[s.sock] = s }
+
+      readable, = IO.select(server_map.keys, nil, nil, timeout)
+      return [] if readable.nil?
+
+      readable.map { |sock| server_map[sock] }
+    end
+
+    def groups_for_keys(*keys)
+      keys.flatten!
+      keys.map! { |a| @key_manager.validate_key(a.to_s) }
+      groups = @ring.keys_grouped_by_server(keys)
+      if (unfound_keys = groups.delete(nil))
+        Dalli.logger.debug do
+          "unable to get keys for #{unfound_keys.length} keys " \
+            'because no matching server was found'
+        end
+      end
+      groups
+    end
+  end
+end
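
PipelinedGetter is internal; client code reaches it through get_multi and get_multi_cas. For illustration, a usage sketch with made-up keys:

    require 'dalli'

    client = Dalli::Client.new('localhost:11211')
    client.set('a', 1)
    client.set('b', 2)

    client.get_multi('a', 'b', 'missing')   # => { "a" => 1, "b" => 2 }

    # The block form yields each found key/value pair as responses stream in.
    client.get_multi('a', 'b') { |key, value| puts "#{key}=#{value}" }

    # The CAS variant returns [value, cas_id] pairs.
    client.get_multi_cas('a', 'b')          # => { "a" => [1, <cas>], "b" => [2, <cas>] }
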
diff --git a/lib/dalli/protocol.rb b/lib/dalli/protocol.rb
index 1a7bac1..9b073f3 100644
--- a/lib/dalli/protocol.rb
+++ b/lib/dalli/protocol.rb
@@ -1,8 +1,19 @@
 # frozen_string_literal: true
 
+require 'timeout'
+
 module Dalli
   module Protocol
     # Preserved for backwards compatibility.  Should be removed in 4.0
     NOT_FOUND = ::Dalli::NOT_FOUND
+
+    # Ruby 3.2 raises IO::TimeoutError on blocking reads/writes, but
+    # it is not defined in earlier Ruby versions.
+    TIMEOUT_ERRORS =
+      if defined?(IO::TimeoutError)
+        [Timeout::Error, IO::TimeoutError]
+      else
+        [Timeout::Error]
+      end
   end
 end
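
For illustration, a sketch of how the TIMEOUT_ERRORS list can be used in a rescue clause so the same code works on Rubies with and without IO::TimeoutError (the read_from_socket helper is hypothetical):

    def read_from_socket(sock, len)
      sock.read(len)
    rescue *Dalli::Protocol::TIMEOUT_ERRORS => e
      # Timeout::Error on all Rubies, plus IO::TimeoutError on Ruby >= 3.2
      raise Dalli::NetworkError, "socket timeout: #{e.message}"
    end
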
diff --git a/lib/dalli/protocol/base.rb b/lib/dalli/protocol/base.rb
new file mode 100644
index 0000000..74274f8
--- /dev/null
+++ b/lib/dalli/protocol/base.rb
@@ -0,0 +1,250 @@
+# frozen_string_literal: true
+
+require 'forwardable'
+require 'socket'
+require 'timeout'
+
+module Dalli
+  module Protocol
+    ##
+    # Base class for a single Memcached server, containing logic common to all
+    # protocols.  Contains logic for managing connection state to the server and value
+    # handling.
+    ##
+    class Base
+      extend Forwardable
+
+      attr_accessor :weight, :options
+
+      def_delegators :@value_marshaller, :serializer, :compressor, :compression_min_size, :compress_by_default?
+      def_delegators :@connection_manager, :name, :sock, :hostname, :port, :close, :connected?, :socket_timeout,
+                     :socket_type, :up!, :down!, :write, :reconnect_down_server?, :raise_down_error
+
+      def initialize(attribs, client_options = {})
+        hostname, port, socket_type, @weight, user_creds = ServerConfigParser.parse(attribs)
+        @options = client_options.merge(user_creds)
+        @value_marshaller = ValueMarshaller.new(@options)
+        @connection_manager = ConnectionManager.new(hostname, port, socket_type, @options)
+      end
+
+      # Chokepoint method for error handling and ensuring liveness
+      def request(opkey, *args)
+        verify_state(opkey)
+
+        begin
+          @connection_manager.start_request!
+          response = send(opkey, *args)
+
+          # pipelined_get emits the query but doesn't read the response(s)
+          @connection_manager.finish_request! unless opkey == :pipelined_get
+
+          response
+        rescue Dalli::MarshalError => e
+          log_marshal_err(args.first, e)
+          raise
+        rescue Dalli::DalliError
+          raise
+        rescue StandardError => e
+          log_unexpected_err(e)
+          close
+          raise
+        end
+      end
+
+      ##
+      # Boolean method used by clients of this class to determine if this
+      # particular memcached instance is available for use.
+      def alive?
+        ensure_connected!
+      rescue Dalli::NetworkError
+        # ensure_connected! raises a NetworkError if connection fails.  We
+        # want to capture that error and convert it to a boolean value here.
+        false
+      end
+
+      def lock!; end
+
+      def unlock!; end
+
+      # Start reading key/value pairs from this connection. This is usually called
+      # after a series of GETKQ commands. A NOOP is sent, and the server begins
+      # flushing responses for kv pairs that were found.
+      #
+      # Returns nothing.
+      def pipeline_response_setup
+        verify_pipelined_state(:getkq)
+        write_noop
+        response_buffer.reset
+      end
+
+      # Attempt to receive and parse as many key/value pairs as possible
+      # from this server. After #pipeline_response_setup, this should be invoked
+      # repeatedly whenever this server's socket is readable until
+      # #pipeline_complete?.
+      #
+      # Returns a Hash of kv pairs received.
+      def pipeline_next_responses
+        reconnect_on_pipeline_complete!
+        values = {}
+
+        response_buffer.read
+
+        status, cas, key, value = response_buffer.process_single_getk_response
+        # status is not nil only if we have a full response to parse
+        # in the buffer
+        until status.nil?
+          # If the status is ok and key is nil, then this is the response
+          # to the noop at the end of the pipeline
+          finish_pipeline && break if status && key.nil?
+
+          # If the status is ok and the key is not nil, then this is a
+          # getkq response with a value that we want to set in the response hash
+          values[key] = [value, cas] unless key.nil?
+
+          # Get the next response from the buffer
+          status, cas, key, value = response_buffer.process_single_getk_response
+        end
+
+        values
+      rescue SystemCallError, *TIMEOUT_ERRORS, EOFError => e
+        @connection_manager.error_on_request!(e)
+      end
+
+      # Abort the current pipelined get. Generally used to signal an external
+      # timeout during pipelined get.  The underlying socket is
+      # disconnected, and the exception is swallowed.
+      #
+      # Returns nothing.
+      def pipeline_abort
+        response_buffer.clear
+        @connection_manager.abort_request!
+        return true unless connected?
+
+        # Closes the connection, which ensures that our connection
+        # is in a clean state for future requests
+        @connection_manager.error_on_request!('External timeout')
+      rescue NetworkError
+        true
+      end
+
+      # Did the last call to #pipeline_response_setup complete successfully?
+      def pipeline_complete?
+        !response_buffer.in_progress?
+      end
+
+      def username
+        @options[:username] || ENV.fetch('MEMCACHE_USERNAME', nil)
+      end
+
+      def password
+        @options[:password] || ENV.fetch('MEMCACHE_PASSWORD', nil)
+      end
+
+      def require_auth?
+        !username.nil?
+      end
+
+      def quiet?
+        Thread.current[::Dalli::QUIET]
+      end
+      alias multi? quiet?
+
+      # NOTE: Additional public methods should be overridden in Dalli::Threadsafe
+
+      private
+
+      ALLOWED_QUIET_OPS = %i[add replace set delete incr decr append prepend flush noop].freeze
+      def verify_allowed_quiet!(opkey)
+        return if ALLOWED_QUIET_OPS.include?(opkey)
+
+        raise Dalli::NotPermittedMultiOpError, "The operation #{opkey} is not allowed in a quiet block."
+      end
+
+      ##
+      # Checks to see if we can execute the specified operation.  Verifies
+      # that no request is already in progress and, in a quiet block, that the command is allowed
+      ##
+      def verify_state(opkey)
+        @connection_manager.confirm_ready!
+        verify_allowed_quiet!(opkey) if quiet?
+
+        # The ensure_connected! call has the side effect of connecting the
+        # underlying socket if it is not connected, or there's been a disconnect
+        # because of timeout or other error.  Method raises an error
+        # if it can't connect
+        raise_down_error unless ensure_connected!
+      end
+
+      def verify_pipelined_state(_opkey)
+        @connection_manager.confirm_in_progress!
+        raise_down_error unless connected?
+      end
+
+      # The socket connection to the underlying server is initialized as a side
+      # effect of this call.  In fact, this is the ONLY place where that
+      # socket connection is initialized.
+      #
+      # Both this method and connect need to be in this class so we can do auth
+      # as required
+      #
+      # Since this is invoked exclusively in verify_state, we don't need to worry about
+      # thread safety.  Using it elsewhere may require revisiting that assumption.
+      def ensure_connected!
+        return true if connected?
+        return false unless reconnect_down_server?
+
+        connect # This call needs to be in this class so we can do auth
+        connected?
+      end
+
+      def cache_nils?(opts)
+        return false unless opts.is_a?(Hash)
+
+        opts[:cache_nils] ? true : false
+      end
+
+      def connect
+        @connection_manager.establish_connection
+        authenticate_connection if require_auth?
+        @version = version # Connect socket if not authed
+        up!
+      end
+
+      def pipelined_get(keys)
+        req = +''
+        keys.each do |key|
+          req << quiet_get_request(key)
+        end
+        # Could send noop here instead of in pipeline_response_setup
+        write(req)
+      end
+
+      def response_buffer
+        @response_buffer ||= ResponseBuffer.new(@connection_manager, response_processor)
+      end
+
+      # Called after the noop response is received at the end of a set
+      # of pipelined gets
+      def finish_pipeline
+        response_buffer.clear
+        @connection_manager.finish_request!
+
+        true # to simplify response
+      end
+
+      def reconnect_on_pipeline_complete!
+        @connection_manager.reconnect! 'pipelined get has completed' if pipeline_complete?
+      end
+
+      def log_marshal_err(key, err)
+        Dalli.logger.error "Marshalling error for key '#{key}': #{err.message}"
+        Dalli.logger.error 'You are trying to cache a Ruby object which cannot be serialized to memcached.'
+      end
+
+      def log_unexpected_err(err)
+        Dalli.logger.error "Unexpected exception during Dalli request: #{err.class.name}: #{err.message}"
+        Dalli.logger.error err.backtrace.join("\n\t")
+      end
+    end
+  end
+end
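
For context, the pipelined accessors above (pipeline_response_setup, pipeline_next_responses,
pipeline_complete?) are designed to be driven by an external read loop; in this release that
role is played by the pipelined getter. A minimal sketch of such a loop, assuming server is an
already-connected protocol instance (e.g. Dalli::Protocol::Binary) whose quiet getkq requests
have already been written via pipelined_get:

    # Sketch only: drives the pipelined-read state machine described above.
    results = {}
    server.pipeline_response_setup                  # send the terminating NOOP
    until server.pipeline_complete?
      ready = IO.select([server.sock], nil, nil, server.socket_timeout)
      unless ready
        server.pipeline_abort                       # external timeout: disconnect and bail
        break
      end
      results.merge!(server.pipeline_next_responses)
    end
    results                                         # => { "key" => [value, cas], ... }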
diff --git a/lib/dalli/protocol/binary.rb b/lib/dalli/protocol/binary.rb
index e197163..66f7151 100644
--- a/lib/dalli/protocol/binary.rb
+++ b/lib/dalli/protocol/binary.rb
@@ -1,14 +1,9 @@
 # frozen_string_literal: true
 
-require 'English'
 require 'forwardable'
 require 'socket'
 require 'timeout'
 
-require_relative 'binary/request_formatter'
-require_relative 'binary/response_processor'
-require_relative 'binary/sasl_authentication'
-
 module Dalli
   module Protocol
     ##
@@ -16,383 +11,63 @@ module Dalli
     # protocol.  Contains logic for managing connection state to the server (retries, etc),
     # formatting requests to the server, and unpacking responses.
     ##
-    class Binary
-      extend Forwardable
-
-      attr_accessor :hostname, :port, :weight, :options
-      attr_reader :sock, :socket_type
-
-      def_delegators :@value_marshaller, :serializer, :compressor, :compression_min_size, :compress_by_default?
-
-      DEFAULTS = {
-        # seconds between trying to contact a remote server
-        down_retry_delay: 30,
-        # connect/read/write timeout for socket operations
-        socket_timeout: 1,
-        # times a socket operation may fail before considering the server dead
-        socket_max_failures: 2,
-        # amount of time to sleep between retries when a failure occurs
-        socket_failure_delay: 0.1,
-        username: nil,
-        password: nil
-      }.freeze
-
-      def initialize(attribs, options = {})
-        @hostname, @port, @weight, @socket_type, options = ServerConfigParser.parse(attribs, options)
-        @options = DEFAULTS.merge(options)
-        @value_marshaller = ValueMarshaller.new(@options)
-        @response_processor = ResponseProcessor.new(self, @value_marshaller)
-
-        reset_down_info
-        @sock = nil
-        @pid = nil
-        @request_in_progress = false
-      end
-
-      def name
-        if socket_type == :unix
-          hostname
-        else
-          "#{hostname}:#{port}"
-        end
-      end
-
-      # Chokepoint method for error handling and ensuring liveness
-      def request(opcode, *args)
-        verify_state
-        # The alive? call has the side effect of connecting the underlying
-        # socket if it is not connected, or there's been a disconnect
-        # because of timeout or other error.  Method raises an error
-        # if it can't connect
-        raise_memcached_down_err unless alive?
-
-        begin
-          send(opcode, *args)
-        rescue Dalli::MarshalError => e
-          log_marshall_err(args.first, e)
-          raise
-        rescue Dalli::DalliError, Dalli::NetworkError, Dalli::ValueOverMaxSize, Timeout::Error
-          raise
-        rescue StandardError => e
-          log_unexpected_err(e)
-          down!
-        end
-      end
-
-      def raise_memcached_down_err
-        raise Dalli::NetworkError,
-              "#{name} is down: #{@error} #{@msg}. If you are sure it is running, "\
-              "ensure memcached version is > #{::Dalli::MIN_SUPPORTED_MEMCACHED_VERSION}."
-      end
-
-      def log_marshall_err(key, err)
-        Dalli.logger.error "Marshalling error for key '#{key}': #{err.message}"
-        Dalli.logger.error 'You are trying to cache a Ruby object which cannot be serialized to memcached.'
-      end
-
-      def log_unexpected_err(err)
-        Dalli.logger.error "Unexpected exception during Dalli request: #{err.class.name}: #{err.message}"
-        Dalli.logger.error err.backtrace.join("\n\t")
-      end
-
-      # The socket connection to the underlying server is initialized as a side
-      # effect of this call.  In fact, this is the ONLY place where that
-      # socket connection is initialized.
-      def alive?
-        return true if @sock
-        return false unless reconnect_down_server?
-
-        connect
-        !!@sock
-      rescue Dalli::NetworkError
-        false
-      end
-
-      def reconnect_down_server?
-        return true unless @last_down_at
-
-        time_to_next_reconnect = @last_down_at + options[:down_retry_delay] - Time.now
-        return true unless time_to_next_reconnect.positive?
-
-        Dalli.logger.debug do
-          format('down_retry_delay not reached for %<name>s (%<time>.3f seconds left)', name: name,
-                                                                                        time: time_to_next_reconnect)
-        end
-        false
-      end
-
-      # Closes the underlying socket and cleans up
-      # socket state.
-      def close
-        return unless @sock
-
-        begin
-          @sock.close
-        rescue StandardError
-          nil
-        end
-        @sock = nil
-        @pid = nil
-        abort_request!
-      end
-
-      def lock!; end
-
-      def unlock!; end
-
-      # Start reading key/value pairs from this connection. This is usually called
-      # after a series of GETKQ commands. A NOOP is sent, and the server begins
-      # flushing responses for kv pairs that were found.
-      #
-      # Returns nothing.
-      def multi_response_start
-        verify_state
-        write_noop
-        @multi_buffer = +''
-        @position = 0
-        start_request!
-      end
-
-      # Did the last call to #multi_response_start complete successfully?
-      def multi_response_completed?
-        @multi_buffer.nil?
-      end
-
-      # Attempt to receive and parse as many key/value pairs as possible
-      # from this server. After #multi_response_start, this should be invoked
-      # repeatedly whenever this server's socket is readable until
-      # #multi_response_completed?.
-      #
-      # Returns a Hash of kv pairs received.
-      def multi_response_nonblock
-        reconnect! 'multi_response has completed' if @multi_buffer.nil?
-
-        @multi_buffer << @sock.read_available
-        buf = @multi_buffer
-        pos = @position
-        values = {}
-
-        while buf.bytesize - pos >= ResponseProcessor::RESP_HEADER_SIZE
-          header = buf.slice(pos, ResponseProcessor::RESP_HEADER_SIZE)
-          _, extra_len, key_len, body_len, cas = @response_processor.unpack_header(header)
-
-          # We've reached the noop at the end of the pipeline
-          if key_len.zero?
-            finish_multi_response
-            break
-          end
-
-          # Break and read more unless we already have the entire response for this header
-          resp_size = ResponseProcessor::RESP_HEADER_SIZE + body_len
-          break unless buf.bytesize - pos >= resp_size
-
-          body = buf.slice(pos + ResponseProcessor::RESP_HEADER_SIZE, body_len)
-          begin
-            key, value = @response_processor.unpack_response_body(extra_len, key_len, body, true)
-            values[key] = [value, cas]
-          rescue DalliError
-            # TODO: Determine if we should be swallowing
-            # this error
-          end
-
-          pos = pos + ResponseProcessor::RESP_HEADER_SIZE + body_len
-        end
-        # TODO: We should be discarding the already processed buffer at this point
-        @position = pos
-
-        values
-      rescue SystemCallError, Timeout::Error, EOFError => e
-        failure!(e)
-      end
-
-      def finish_multi_response
-        @multi_buffer = nil
-        @position = nil
-        finish_request!
-      end
-
-      # Abort an earlier #multi_response_start. Used to signal an external
-      # timeout. The underlying socket is disconnected, and the exception is
-      # swallowed.
-      #
-      # Returns nothing.
-      def multi_response_abort
-        @multi_buffer = nil
-        @position = nil
-        abort_request!
-        return true unless @sock
-
-        failure!(RuntimeError.new('External timeout'))
-      rescue NetworkError
-        true
-      end
-
-      def read(count)
-        start_request!
-        data = @sock.readfull(count)
-        finish_request!
-        data
-      rescue SystemCallError, Timeout::Error, EOFError => e
-        failure!(e)
+    class Binary < Base
+      def response_processor
+        @response_processor ||= ResponseProcessor.new(@connection_manager, @value_marshaller)
       end
 
-      def write(bytes)
-        start_request!
-        result = @sock.write(bytes)
-        finish_request!
-        result
-      rescue SystemCallError, Timeout::Error => e
-        failure!(e)
-      end
-
-      def socket_timeout
-        @socket_timeout ||= @options[:socket_timeout]
-      end
-
-      # NOTE: Additional public methods should be overridden in Dalli::Threadsafe
-
       private
 
-      def request_in_progress?
-        @request_in_progress
-      end
-
-      def start_request!
-        @request_in_progress = true
-      end
-
-      def finish_request!
-        @request_in_progress = false
-      end
-
-      def abort_request!
-        @request_in_progress = false
-      end
-
-      def verify_state
-        failure!(RuntimeError.new('Already writing to socket')) if request_in_progress?
-        reconnect_on_fork if fork_detected?
-      end
-
-      def fork_detected?
-        @pid && @pid != Process.pid
-      end
-
-      def reconnect_on_fork
-        message = 'Fork detected, re-connecting child process...'
-        Dalli.logger.info { message }
-        reconnect! message
-      end
-
-      # Marks the server instance as needing reconnect.  Raises a
-      # Dalli::NetworkError with the specified message.  Calls close
-      # to clean up socket state
-      def reconnect!(message)
-        close
-        sleep(options[:socket_failure_delay]) if options[:socket_failure_delay]
-        raise Dalli::NetworkError, message
-      end
-
-      # Raises Dalli::NetworkError
-      def failure!(exception)
-        message = "#{name} failed (count: #{@fail_count}) #{exception.class}: #{exception.message}"
-        Dalli.logger.warn { message }
-
-        @fail_count += 1
-        if @fail_count >= options[:socket_max_failures]
-          down!
-        else
-          reconnect! 'Socket operation failed, retrying...'
-        end
-      end
-
-      # Marks the server instance as down.  Updates the down_at state
-      # and raises an Dalli::NetworkError that includes the underlying
-      # error in the message.  Calls close to clean up socket state
-      def down!
-        close
-        log_down_detected
-
-        @error = $ERROR_INFO&.class&.name
-        @msg ||= $ERROR_INFO&.message
-        raise Dalli::NetworkError, "#{name} is down: #{@error} #{@msg}"
-      end
-
-      def log_down_detected
-        @last_down_at = Time.now
-
-        if @down_at
-          time = Time.now - @down_at
-          Dalli.logger.debug { format('%<name>s is still down (for %<time>.3f seconds now)', name: name, time: time) }
-        else
-          @down_at = @last_down_at
-          Dalli.logger.warn("#{name} is down")
-        end
-      end
-
-      def log_up_detected
-        return unless @down_at
-
-        time = Time.now - @down_at
-        Dalli.logger.warn { format('%<name>s is back (downtime was %<time>.3f seconds)', name: name, time: time) }
-      end
-
-      def up!
-        log_up_detected
-        reset_down_info
+      # Retrieval Commands
+      def get(key, options = nil)
+        req = RequestFormatter.standard_request(opkey: :get, key: key)
+        write(req)
+        response_processor.get(cache_nils: cache_nils?(options))
       end
 
-      def reset_down_info
-        @fail_count = 0
-        @down_at = nil
-        @last_down_at = nil
-        @msg = nil
-        @error = nil
+      def quiet_get_request(key)
+        RequestFormatter.standard_request(opkey: :getkq, key: key)
       end
 
-      def multi?
-        Thread.current[:dalli_multi]
+      def gat(key, ttl, options = nil)
+        ttl = TtlSanitizer.sanitize(ttl)
+        req = RequestFormatter.standard_request(opkey: :gat, key: key, ttl: ttl)
+        write(req)
+        response_processor.get(cache_nils: cache_nils?(options))
       end
 
-      def cache_nils?(opts)
-        return false unless opts.is_a?(Hash)
-
-        opts[:cache_nils] ? true : false
+      def touch(key, ttl)
+        ttl = TtlSanitizer.sanitize(ttl)
+        write(RequestFormatter.standard_request(opkey: :touch, key: key, ttl: ttl))
+        response_processor.generic_response
       end
 
-      def get(key, options = nil)
+      # TODO: This is confusing, as there's a cas command in memcached
+      # and this isn't it.  Maybe rename?  Maybe eliminate?
+      def cas(key)
         req = RequestFormatter.standard_request(opkey: :get, key: key)
         write(req)
-        @response_processor.generic_response(unpack: true, cache_nils: cache_nils?(options))
-      end
-
-      def send_multiget(keys)
-        req = +''
-        keys.each do |key|
-          req << RequestFormatter.standard_request(opkey: :getkq, key: key)
-        end
-        # Could send noop here instead of in multi_response_start
-        write(req)
+        response_processor.data_cas_response
       end
 
+      # Storage Commands
       def set(key, value, ttl, cas, options)
-        opkey = multi? ? :setq : :set
-        process_value_req(opkey, key, value, ttl, cas, options)
+        opkey = quiet? ? :setq : :set
+        storage_req(opkey, key, value, ttl, cas, options)
       end
 
       def add(key, value, ttl, options)
-        opkey = multi? ? :addq : :add
-        cas = 0
-        process_value_req(opkey, key, value, ttl, cas, options)
+        opkey = quiet? ? :addq : :add
+        storage_req(opkey, key, value, ttl, 0, options)
       end
 
       def replace(key, value, ttl, cas, options)
-        opkey = multi? ? :replaceq : :replace
-        process_value_req(opkey, key, value, ttl, cas, options)
+        opkey = quiet? ? :replaceq : :replace
+        storage_req(opkey, key, value, ttl, cas, options)
       end
 
       # rubocop:disable Metrics/ParameterLists
-      def process_value_req(opkey, key, value, ttl, cas, options)
+      def storage_req(opkey, key, value, ttl, cas, options)
         (value, bitflags) = @value_marshaller.store(key, value, options)
         ttl = TtlSanitizer.sanitize(ttl)
 
@@ -400,21 +75,42 @@ module Dalli
                                                 value: value, bitflags: bitflags,
                                                 ttl: ttl, cas: cas)
         write(req)
-        @response_processor.cas_response unless multi?
+        response_processor.storage_response unless quiet?
       end
       # rubocop:enable Metrics/ParameterLists
 
+      def append(key, value)
+        opkey = quiet? ? :appendq : :append
+        write_append_prepend opkey, key, value
+      end
+
+      def prepend(key, value)
+        opkey = quiet? ? :prependq : :prepend
+        write_append_prepend opkey, key, value
+      end
+
+      def write_append_prepend(opkey, key, value)
+        write(RequestFormatter.standard_request(opkey: opkey, key: key, value: value))
+        response_processor.no_body_response unless quiet?
+      end
+
+      # Delete Commands
       def delete(key, cas)
-        opkey = multi? ? :deleteq : :delete
+        opkey = quiet? ? :deleteq : :delete
         req = RequestFormatter.standard_request(opkey: opkey, key: key, cas: cas)
         write(req)
-        @response_processor.generic_response unless multi?
+        response_processor.delete unless quiet?
       end
 
-      def flush(ttl = 0)
-        req = RequestFormatter.standard_request(opkey: :flush, ttl: ttl)
-        write(req)
-        @response_processor.generic_response
+      # Arithmetic Commands
+      def decr(key, count, ttl, initial)
+        opkey = quiet? ? :decrq : :decr
+        decr_incr opkey, key, count, ttl, initial
+      end
+
+      def incr(key, count, ttl, initial)
+        opkey = quiet? ? :incrq : :incr
+        decr_incr opkey, key, count, ttl, initial
       end
 
       # This allows us to special case a nil initial value, and
@@ -429,115 +125,48 @@ module Dalli
         initial ||= 0
         write(RequestFormatter.decr_incr_request(opkey: opkey, key: key,
                                                  count: count, initial: initial, expiry: expiry))
-        @response_processor.decr_incr_response
-      end
-
-      def decr(key, count, ttl, initial)
-        decr_incr :decr, key, count, ttl, initial
+        response_processor.decr_incr unless quiet?
       end
 
-      def incr(key, count, ttl, initial)
-        decr_incr :incr, key, count, ttl, initial
-      end
-
-      def write_append_prepend(opkey, key, value)
-        write_generic RequestFormatter.standard_request(opkey: opkey, key: key, value: value)
-      end
-
-      def write_generic(bytes)
-        write(bytes)
-        @response_processor.generic_response
-      end
-
-      def write_noop
-        req = RequestFormatter.standard_request(opkey: :noop)
-        write(req)
+      # Other Commands
+      def flush(ttl = 0)
+        opkey = quiet? ? :flushq : :flush
+        write(RequestFormatter.standard_request(opkey: opkey, ttl: ttl))
+        response_processor.no_body_response unless quiet?
       end
 
       # Noop is a keepalive operation but also used to demarcate the end of a set of pipelined commands.
       # We need to read all the responses at once.
       def noop
         write_noop
-        @response_processor.multi_with_keys_response
-      end
-
-      def append(key, value)
-        write_append_prepend :append, key, value
-      end
-
-      def prepend(key, value)
-        write_append_prepend :prepend, key, value
+        response_processor.consume_all_responses_until_noop
       end
 
       def stats(info = '')
         req = RequestFormatter.standard_request(opkey: :stat, key: info)
         write(req)
-        @response_processor.multi_with_keys_response
+        response_processor.stats
       end
 
       def reset_stats
-        write_generic RequestFormatter.standard_request(opkey: :stat, key: 'reset')
-      end
-
-      def cas(key)
-        req = RequestFormatter.standard_request(opkey: :get, key: key)
-        write(req)
-        @response_processor.data_cas_response
+        write(RequestFormatter.standard_request(opkey: :stat, key: 'reset'))
+        response_processor.reset
       end
 
       def version
-        write_generic RequestFormatter.standard_request(opkey: :version)
+        write(RequestFormatter.standard_request(opkey: :version))
+        response_processor.version
       end
 
-      def touch(key, ttl)
-        ttl = TtlSanitizer.sanitize(ttl)
-        write_generic RequestFormatter.standard_request(opkey: :touch, key: key, ttl: ttl)
-      end
-
-      def gat(key, ttl, options = nil)
-        ttl = TtlSanitizer.sanitize(ttl)
-        req = RequestFormatter.standard_request(opkey: :gat, key: key, ttl: ttl)
+      def write_noop
+        req = RequestFormatter.standard_request(opkey: :noop)
         write(req)
-        @response_processor.generic_response(unpack: true, cache_nils: cache_nils?(options))
-      end
-
-      def connect
-        Dalli.logger.debug { "Dalli::Server#connect #{name}" }
-
-        begin
-          @pid = Process.pid
-          @sock = memcached_socket
-          authenticate_connection if require_auth?
-          @version = version # Connect socket if not authed
-          up!
-        rescue Dalli::DalliError # SASL auth failure
-          raise
-        rescue SystemCallError, Timeout::Error, EOFError, SocketError => e
-          # SocketError = DNS resolution failure
-          failure!(e)
-        end
-      end
-
-      def memcached_socket
-        if socket_type == :unix
-          Dalli::Socket::UNIX.open(hostname, self, options)
-        else
-          Dalli::Socket::TCP.open(hostname, port, self, options)
-        end
-      end
-
-      def require_auth?
-        !username.nil?
-      end
-
-      def username
-        @options[:username] || ENV['MEMCACHE_USERNAME']
-      end
-
-      def password
-        @options[:password] || ENV['MEMCACHE_PASSWORD']
       end
 
+      require_relative 'binary/request_formatter'
+      require_relative 'binary/response_header'
+      require_relative 'binary/response_processor'
+      require_relative 'binary/sasl_authentication'
       include SaslAuthentication
     end
   end
diff --git a/lib/dalli/protocol/binary/request_formatter.rb b/lib/dalli/protocol/binary/request_formatter.rb
index f167ecf..a147f89 100644
--- a/lib/dalli/protocol/binary/request_formatter.rb
+++ b/lib/dalli/protocol/binary/request_formatter.rb
@@ -31,11 +31,14 @@ module Dalli
           deleteq: 0x14,
           incrq: 0x15,
           decrq: 0x16,
+          flushq: 0x18,
+          appendq: 0x19,
+          prependq: 0x1A,
+          touch: 0x1C,
+          gat: 0x1D,
           auth_negotiation: 0x20,
           auth_request: 0x21,
-          auth_continue: 0x22,
-          touch: 0x1C,
-          gat: 0x1D
+          auth_continue: 0x22
         }.freeze
 
         REQ_HEADER_FORMAT = 'CCnCCnNNQ'
@@ -56,6 +59,8 @@ module Dalli
 
           append: KEY_AND_VALUE,
           prepend: KEY_AND_VALUE,
+          appendq: KEY_AND_VALUE,
+          prependq: KEY_AND_VALUE,
           auth_request: KEY_AND_VALUE,
           auth_continue: KEY_AND_VALUE,
 
@@ -68,8 +73,11 @@ module Dalli
 
           incr: INCR_DECR,
           decr: INCR_DECR,
+          incrq: INCR_DECR,
+          decrq: INCR_DECR,
 
           flush: TTL_ONLY,
+          flushq: TTL_ONLY,
 
           noop: NO_BODY,
           auth_negotiation: NO_BODY,
@@ -78,7 +86,7 @@ module Dalli
           touch: TTL_AND_KEY,
           gat: TTL_AND_KEY
         }.freeze
-        FORMAT = BODY_FORMATS.transform_values { |v| REQ_HEADER_FORMAT + v; }
+        FORMAT = BODY_FORMATS.transform_values { |v| REQ_HEADER_FORMAT + v }
 
         # rubocop:disable Metrics/ParameterLists
         def self.standard_request(opkey:, key: nil, value: nil, opaque: 0, cas: 0, bitflags: nil, ttl: nil)
@@ -86,7 +94,7 @@ module Dalli
           key_len = key.nil? ? 0 : key.bytesize
           value_len = value.nil? ? 0 : value.bytesize
           header = [REQUEST, OPCODES[opkey], key_len, extra_len, 0, 0, extra_len + key_len + value_len, opaque, cas]
-          body = [bitflags, ttl, key, value].reject(&:nil?)
+          body = [bitflags, ttl, key, value].compact
           (header + body).pack(FORMAT[opkey])
         end
         # rubocop:enable Metrics/ParameterLists
@@ -101,7 +109,7 @@ module Dalli
         end
 
         def self.as_8byte_uint(val)
-          [val >> 32, 0xFFFFFFFF & val]
+          [val >> 32, val & 0xFFFFFFFF]
         end
       end
     end
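
The as_8byte_uint helper above splits a 64-bit unsigned value into two big-endian 32-bit words
so it can be packed with 'NN' (used, for example, for the 64-bit incr/decr counter fields). A
standalone sketch of the round trip, using only Ruby's pack/unpack:

    # Illustration only: a 64-bit value expressed as [high 32 bits, low 32 bits].
    val = 0x0000_0001_0000_0002
    high, low = [val >> 32, val & 0xFFFFFFFF]
    packed = [high, low].pack('NN')                 # 8 bytes, network byte order
    hi, lo = packed.unpack('NN')
    raise 'mismatch' unless ((hi << 32) | lo) == val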
diff --git a/lib/dalli/protocol/binary/response_header.rb b/lib/dalli/protocol/binary/response_header.rb
new file mode 100644
index 0000000..5470d61
--- /dev/null
+++ b/lib/dalli/protocol/binary/response_header.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module Dalli
+  module Protocol
+    class Binary
+      ##
+      # Class that encapsulates data parsed from a memcached response header.
+      ##
+      class ResponseHeader
+        SIZE = 24
+        FMT = '@2nCCnNNQ'
+
+        attr_reader :key_len, :extra_len, :data_type, :status, :body_len, :opaque, :cas
+
+        def initialize(buf)
+          raise ArgumentError, "Response buffer must be at least #{SIZE} bytes" unless buf.bytesize >= SIZE
+
+          @key_len, @extra_len, @data_type, @status, @body_len, @opaque, @cas = buf.unpack(FMT)
+        end
+
+        def ok?
+          status.zero?
+        end
+
+        def not_found?
+          status == 1
+        end
+
+        NOT_STORED_STATUSES = [2, 5].freeze
+        def not_stored?
+          NOT_STORED_STATUSES.include?(status)
+        end
+      end
+    end
+  end
+end
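
To make the header layout concrete, here is a standalone sketch that packs a fake 24-byte
binary-protocol response header (magic, opcode, key length, extras length, data type, status,
total body length, opaque, CAS) and reads it back with the same '@2nCCnNNQ' format used by
ResponseHeader above:

    # 'Q' is native-endian in both the pack and the unpack here, so the round trip is consistent.
    header = [0x81, 0x00, 3, 4, 0, 0, 10, 0, 42].pack('CCnCCnNNQ')
    key_len, extra_len, data_type, status, body_len, opaque, cas = header.unpack('@2nCCnNNQ')
    # key_len => 3, extra_len => 4, status => 0 (ok?), body_len => 10, cas => 42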
diff --git a/lib/dalli/protocol/binary/response_processor.rb b/lib/dalli/protocol/binary/response_processor.rb
index 5551714..a11c1b8 100644
--- a/lib/dalli/protocol/binary/response_processor.rb
+++ b/lib/dalli/protocol/binary/response_processor.rb
@@ -9,9 +9,6 @@ module Dalli
       # and parsing into local values.  Handles errors on unexpected values.
       ##
       class ResponseProcessor
-        RESP_HEADER = '@2nCCnNNQ'
-        RESP_HEADER_SIZE = 24
-
         # Response codes taken from:
         # https://github.com/memcached/memcached/wiki/BinaryProtocolRevamped#response-status
         RESPONSE_CODES = {
@@ -44,91 +41,138 @@ module Dalli
         end
 
         def read_response
-          status, extra_len, key_len, body_len, cas = unpack_header(read_header)
-          body = read(body_len) if body_len.positive?
-          [status, extra_len, body, cas, key_len]
-        end
-
-        def unpack_header(header)
-          (key_len, extra_len, _, status, body_len, _, cas) = header.unpack(RESP_HEADER)
-          [status, extra_len, key_len, body_len, cas]
-        end
-
-        def unpack_response_body(extra_len, key_len, body, unpack)
-          bitflags = extra_len.positive? ? body.byteslice(0, extra_len).unpack1('N') : 0x0
-          key = body.byteslice(extra_len, key_len) if key_len.positive?
-          value = body.byteslice(extra_len + key_len, body.bytesize - (extra_len + key_len))
-          value = unpack ? @value_marshaller.retrieve(value, bitflags) : value
+          resp_header = ResponseHeader.new(read_header)
+          body = read(resp_header.body_len) if resp_header.body_len.positive?
+          [resp_header, body]
+        end
+
+        def unpack_response_body(resp_header, body, parse_as_stored_value)
+          extra_len = resp_header.extra_len
+          key_len = resp_header.key_len
+          bitflags = extra_len.positive? ? body.unpack1('N') : 0x0
+          key = body.byteslice(extra_len, key_len).force_encoding(Encoding::UTF_8) if key_len.positive?
+          value = body.byteslice((extra_len + key_len)..-1)
+          value = @value_marshaller.retrieve(value, bitflags) if parse_as_stored_value
           [key, value]
         end
 
         def read_header
-          read(RESP_HEADER_SIZE) || raise(Dalli::NetworkError, 'No response')
+          read(ResponseHeader::SIZE) || raise(Dalli::NetworkError, 'No response')
         end
 
-        def not_found?(status)
-          status == 1
-        end
+        def raise_on_not_ok!(resp_header)
+          return if resp_header.ok?
 
-        NOT_STORED_STATUSES = [2, 5].freeze
-        def not_stored?(status)
-          NOT_STORED_STATUSES.include?(status)
+          raise Dalli::DalliError, "Response error #{resp_header.status}: #{RESPONSE_CODES[resp_header.status]}"
         end
 
-        def raise_on_not_ok_status!(status)
-          return if status.zero?
+        def get(cache_nils: false)
+          resp_header, body = read_response
 
-          raise Dalli::DalliError, "Response error #{status}: #{RESPONSE_CODES[status]}"
+          return false if resp_header.not_stored? # Not stored, normal status for add operation
+          return cache_nils ? ::Dalli::NOT_FOUND : nil if resp_header.not_found?
+
+          raise_on_not_ok!(resp_header)
+          return true unless body
+
+          unpack_response_body(resp_header, body, true).last
         end
 
-        def generic_response(unpack: false, cache_nils: false)
-          status, extra_len, body, _, key_len = read_response
+        ##
+        # Response for a storage operation.  Returns the CAS on success, nil if
+        # the key was not found, false if the value wasn't stored, and raises
+        # an error for all other error codes from memcached.
+        ##
+        def storage_response
+          resp_header, = read_response
+          return nil if resp_header.not_found?
+          return false if resp_header.not_stored? # Not stored, normal status for add operation
 
-          return cache_nils ? ::Dalli::NOT_FOUND : nil if not_found?(status)
-          return false if not_stored?(status) # Not stored, normal status for add operation
+          raise_on_not_ok!(resp_header)
+          resp_header.cas
+        end
 
-          raise_on_not_ok_status!(status)
-          return true unless body
+        def delete
+          resp_header, = read_response
+          return false if resp_header.not_found? || resp_header.not_stored?
 
-          unpack_response_body(extra_len, key_len, body, unpack).last
+          raise_on_not_ok!(resp_header)
+          true
         end
 
         def data_cas_response
-          status, extra_len, body, cas, key_len = read_response
-          return [nil, cas] if not_found?(status)
-          return [nil, false] if not_stored?(status)
+          resp_header, body = read_response
+          return [nil, resp_header.cas] if resp_header.not_found?
+          return [nil, false] if resp_header.not_stored?
 
-          raise_on_not_ok_status!(status)
-          return [nil, cas] unless body
+          raise_on_not_ok!(resp_header)
+          return [nil, resp_header.cas] unless body
 
-          [unpack_response_body(extra_len, key_len, body, true).last, cas]
+          [unpack_response_body(resp_header, body, true).last, resp_header.cas]
         end
 
-        def cas_response
-          data_cas_response.last
+        # Returns the new value for the key, if found and updated
+        def decr_incr
+          body = generic_response
+          body ? body.unpack1('Q>') : body
         end
 
-        def multi_with_keys_response
+        def stats
           hash = {}
           loop do
-            status, extra_len, body, _, key_len = read_response
+            resp_header, body = read_response
             # This is the response to the terminating noop / end of stat
-            return hash if status.zero? && key_len.zero?
+            return hash if resp_header.ok? && resp_header.key_len.zero?
 
             # Ignore any responses with non-zero status codes,
             # such as errors from set operations.  That allows
             # this code to be used at the end of a multi
             # block to clear any error responses from inside the multi.
-            next unless status.zero?
+            next unless resp_header.ok?
 
-            key, value = unpack_response_body(extra_len, key_len, body, true)
+            key, value = unpack_response_body(resp_header, body, true)
             hash[key] = value
           end
         end
 
-        def decr_incr_response
-          body = generic_response
-          body ? body.unpack1('Q>') : body
+        def flush
+          no_body_response
+        end
+
+        def reset
+          generic_response
+        end
+
+        def version
+          generic_response
+        end
+
+        def consume_all_responses_until_noop
+          loop do
+            resp_header, = read_response
+            # This is the response to the terminating noop
+            return true if resp_header.ok? && resp_header.key_len.zero?
+          end
+        end
+
+        def generic_response
+          resp_header, body = read_response
+
+          return false if resp_header.not_stored? # Not stored, normal status for add operation
+          return nil if resp_header.not_found?
+
+          raise_on_not_ok!(resp_header)
+          return true unless body
+
+          unpack_response_body(resp_header, body, false).last
+        end
+
+        def no_body_response
+          resp_header, = read_response
+          return false if resp_header.not_stored? # Not stored, possible status for append/prepend/delete
+
+          raise_on_not_ok!(resp_header)
+          true
         end
 
         def validate_auth_format(extra_len, count)
@@ -137,11 +181,57 @@ module Dalli
           raise Dalli::NetworkError, "Unexpected message format: #{extra_len} #{count}"
         end
 
-        def auth_response
-          (_, extra_len, _, status, body_len,) = read_header.unpack(RESP_HEADER)
-          validate_auth_format(extra_len, body_len)
+        def auth_response(buf = read_header)
+          resp_header = ResponseHeader.new(buf)
+          body_len = resp_header.body_len
+          validate_auth_format(resp_header.extra_len, body_len)
           content = read(body_len) if body_len.positive?
-          [status, content]
+          [resp_header.status, content]
+        end
+
+        def contains_header?(buf)
+          return false unless buf
+
+          buf.bytesize >= ResponseHeader::SIZE
+        end
+
+        def response_header_from_buffer(buf)
+          ResponseHeader.new(buf)
+        end
+
+        ##
+        # This method returns an array of values used in a pipelined
+        # getk process.  The first value is the number of bytes by
+        # which to advance the pointer in the buffer.  If the
+        # complete response is found in the buffer, this will
+        # be the response size.  Otherwise it is zero.
+        #
+        # The remaining four values in the array are the response status
+        # (true when the header status is ok), the CAS, the key, and the value.
+        ##
+        def getk_response_from_buffer(buf)
+          # There's no header in the buffer, so don't advance
+          return [0, nil, nil, nil, nil] unless contains_header?(buf)
+
+          resp_header = response_header_from_buffer(buf)
+          body_len = resp_header.body_len
+
+          # We have a complete response that has no body.
+          # This is either the response to the terminating
+          # noop or, if the status is not zero, an intermediate
+          # error response that needs to be discarded.
+          return [ResponseHeader::SIZE, resp_header.ok?, resp_header.cas, nil, nil] if body_len.zero?
+
+          resp_size = ResponseHeader::SIZE + body_len
+          # The header is in the buffer, but the body is not.  As we don't have
+          # a complete response, don't advance the buffer
+          return [0, nil, nil, nil, nil] unless buf.bytesize >= resp_size
+
+          # The full response is in our buffer, so parse it and return
+          # the values
+          body = buf.byteslice(ResponseHeader::SIZE, body_len)
+          key, value = unpack_response_body(resp_header, body, true)
+          [resp_size, resp_header.ok?, resp_header.cas, key, value]
         end
       end
     end
diff --git a/lib/dalli/protocol/binary/sasl_authentication.rb b/lib/dalli/protocol/binary/sasl_authentication.rb
index 2a03ad9..160330d 100644
--- a/lib/dalli/protocol/binary/sasl_authentication.rb
+++ b/lib/dalli/protocol/binary/sasl_authentication.rb
@@ -10,12 +10,12 @@ module Dalli
         def perform_auth_negotiation
           write(RequestFormatter.standard_request(opkey: :auth_negotiation))
 
-          status, content = @response_processor.auth_response
+          status, content = response_processor.auth_response
           return [status, []] if content.nil?
 
           # Substitute spaces for the \x00 returned by
           # memcached as a separator for easier
-          content&.tr("\u0000", ' ')
+          content&.tr!("\u0000", ' ')
           mechanisms = content&.split
           [status, mechanisms]
         end
diff --git a/lib/dalli/protocol/connection_manager.rb b/lib/dalli/protocol/connection_manager.rb
new file mode 100644
index 0000000..fe8bd91
--- /dev/null
+++ b/lib/dalli/protocol/connection_manager.rb
@@ -0,0 +1,255 @@
+# frozen_string_literal: true
+
+require 'English'
+require 'socket'
+require 'timeout'
+
+require 'dalli/pid_cache'
+
+module Dalli
+  module Protocol
+    ##
+    # Manages the socket connection to the server, including ensuring liveness
+    # and retries.
+    ##
+    class ConnectionManager
+      DEFAULTS = {
+        # seconds between trying to contact a remote server
+        down_retry_delay: 30,
+        # connect/read/write timeout for socket operations
+        socket_timeout: 1,
+        # times a socket operation may fail before considering the server dead
+        socket_max_failures: 2,
+        # amount of time to sleep between retries when a failure occurs
+        socket_failure_delay: 0.1,
+        # Set keepalive
+        keepalive: true
+      }.freeze
+
+      attr_accessor :hostname, :port, :socket_type, :options
+      attr_reader :sock
+
+      def initialize(hostname, port, socket_type, client_options)
+        @hostname = hostname
+        @port = port
+        @socket_type = socket_type
+        @options = DEFAULTS.merge(client_options)
+        @request_in_progress = false
+        @sock = nil
+        @pid = nil
+
+        reset_down_info
+      end
+
+      def name
+        if socket_type == :unix
+          hostname
+        else
+          "#{hostname}:#{port}"
+        end
+      end
+
+      def establish_connection
+        Dalli.logger.debug { "Dalli::Server#connect #{name}" }
+
+        @sock = memcached_socket
+        @pid = PIDCache.pid
+        @request_in_progress = false
+      rescue SystemCallError, *TIMEOUT_ERRORS, EOFError, SocketError => e
+        # SocketError = DNS resolution failure
+        error_on_request!(e)
+      end
+
+      def reconnect_down_server?
+        return true unless @last_down_at
+
+        time_to_next_reconnect = @last_down_at + options[:down_retry_delay] - Time.now
+        return true unless time_to_next_reconnect.positive?
+
+        Dalli.logger.debug do
+          format('down_retry_delay not reached for %<name>s (%<time>.3f seconds left)', name: name,
+                                                                                        time: time_to_next_reconnect)
+        end
+        false
+      end
+
+      def up!
+        log_up_detected
+        reset_down_info
+      end
+
+      # Marks the server instance as down.  Updates the down_at state
+      # and raises a Dalli::NetworkError that includes the underlying
+      # error in the message.  Calls close to clean up socket state
+      def down!
+        close
+        log_down_detected
+
+        @error = $ERROR_INFO&.class&.name
+        @msg ||= $ERROR_INFO&.message
+        raise_down_error
+      end
+
+      def raise_down_error
+        raise Dalli::NetworkError, "#{name} is down: #{@error} #{@msg}"
+      end
+
+      def socket_timeout
+        @socket_timeout ||= @options[:socket_timeout]
+      end
+
+      def confirm_ready!
+        close if request_in_progress?
+        close_on_fork if fork_detected?
+      end
+
+      def confirm_in_progress!
+        raise '[Dalli] No request in progress. This may be a bug in Dalli.' unless request_in_progress?
+
+        close_on_fork if fork_detected?
+      end
+
+      def close
+        return unless @sock
+
+        begin
+          @sock.close
+        rescue StandardError
+          nil
+        end
+        @sock = nil
+        @pid = nil
+        abort_request!
+      end
+
+      def connected?
+        !@sock.nil?
+      end
+
+      def request_in_progress?
+        @request_in_progress
+      end
+
+      def start_request!
+        raise '[Dalli] Request already in progress. This may be a bug in Dalli.' if @request_in_progress
+
+        @request_in_progress = true
+      end
+
+      def finish_request!
+        raise '[Dalli] No request in progress. This may be a bug in Dalli.' unless @request_in_progress
+
+        @request_in_progress = false
+      end
+
+      def abort_request!
+        @request_in_progress = false
+      end
+
+      def read_line
+        data = @sock.gets("\r\n")
+        error_on_request!('EOF in read_line') if data.nil?
+        data
+      rescue SystemCallError, *TIMEOUT_ERRORS, EOFError => e
+        error_on_request!(e)
+      end
+
+      def read(count)
+        @sock.readfull(count)
+      rescue SystemCallError, *TIMEOUT_ERRORS, EOFError => e
+        error_on_request!(e)
+      end
+
+      def write(bytes)
+        @sock.write(bytes)
+      rescue SystemCallError, *TIMEOUT_ERRORS => e
+        error_on_request!(e)
+      end
+
+      # Non-blocking read.  Here to support the get_multi operation.
+      def read_nonblock
+        @sock.read_available
+      end
+
+      def max_allowed_failures
+        @max_allowed_failures ||= @options[:socket_max_failures] || 2
+      end
+
+      def error_on_request!(err_or_string)
+        log_warn_message(err_or_string)
+
+        @fail_count += 1
+        if @fail_count >= max_allowed_failures
+          down!
+        else
+          # Closes the existing socket, setting up for a reconnect
+          # on next request
+          reconnect!('Socket operation failed, retrying...')
+        end
+      end
+
+      def reconnect!(message)
+        close
+        sleep(options[:socket_failure_delay]) if options[:socket_failure_delay]
+        raise Dalli::NetworkError, message
+      end
+
+      def reset_down_info
+        @fail_count = 0
+        @down_at = nil
+        @last_down_at = nil
+        @msg = nil
+        @error = nil
+      end
+
+      def memcached_socket
+        if socket_type == :unix
+          Dalli::Socket::UNIX.open(hostname, options)
+        else
+          Dalli::Socket::TCP.open(hostname, port, options)
+        end
+      end
+
+      def log_warn_message(err_or_string)
+        Dalli.logger.warn do
+          detail = err_or_string.is_a?(String) ? err_or_string : "#{err_or_string.class}: #{err_or_string.message}"
+          "#{name} failed (count: #{@fail_count}) #{detail}"
+        end
+      end
+
+      def close_on_fork
+        message = 'Fork detected, re-connecting child process...'
+        Dalli.logger.info { message }
+        # Close socket on a fork, setting us up for reconnect
+        # on next request.
+        close
+        raise Dalli::NetworkError, message
+      end
+
+      def fork_detected?
+        @pid && @pid != PIDCache.pid
+      end
+
+      def log_down_detected
+        @last_down_at = Time.now
+
+        if @down_at
+          time = Time.now - @down_at
+          Dalli.logger.debug { format('%<name>s is still down (for %<time>.3f seconds now)', name: name, time: time) }
+        else
+          @down_at = @last_down_at
+          Dalli.logger.warn("#{name} is down")
+        end
+      end
+
+      def log_up_detected
+        return unless @down_at
+
+        time = Time.now - @down_at
+        Dalli.logger.warn { format('%<name>s is back (downtime was %<time>.3f seconds)', name: name, time: time) }
+      end
+    end
+  end
+end
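
A minimal sketch of the failure-escalation policy implemented by error_on_request! above
(illustration only, not the library API): every socket error bumps the failure count; below
socket_max_failures the manager closes the socket and raises a retryable NetworkError, and at
or above that threshold the server is marked down.

    max_allowed_failures = 2              # DEFAULTS[:socket_max_failures]
    fail_count = 0

    on_socket_error = lambda do
      fail_count += 1
      fail_count >= max_allowed_failures ? :down! : :reconnect!
    end

    on_socket_error.call  # => :reconnect!  (close, sleep socket_failure_delay, retry later)
    on_socket_error.call  # => :down!       (close, log "<name> is down", raise NetworkError)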
diff --git a/lib/dalli/protocol/meta.rb b/lib/dalli/protocol/meta.rb
new file mode 100644
index 0000000..b2e66c3
--- /dev/null
+++ b/lib/dalli/protocol/meta.rb
@@ -0,0 +1,178 @@
+# frozen_string_literal: true
+
+require 'forwardable'
+require 'socket'
+require 'timeout'
+
+module Dalli
+  module Protocol
+    ##
+    # Access point for a single Memcached server, accessed via Memcached's meta
+    # protocol.  Contains logic for managing connection state to the server (retries, etc),
+    # formatting requests to the server, and unpacking responses.
+    ##
+    class Meta < Base
+      TERMINATOR = "\r\n"
+
+      def response_processor
+        @response_processor ||= ResponseProcessor.new(@connection_manager, @value_marshaller)
+      end
+
+      # NOTE: Additional public methods should be overridden in Dalli::Threadsafe
+
+      private
+
+      # Retrieval Commands
+      def get(key, options = nil)
+        encoded_key, base64 = KeyRegularizer.encode(key)
+        req = RequestFormatter.meta_get(key: encoded_key, base64: base64)
+        write(req)
+        response_processor.meta_get_with_value(cache_nils: cache_nils?(options))
+      end
+
+      def quiet_get_request(key)
+        encoded_key, base64 = KeyRegularizer.encode(key)
+        RequestFormatter.meta_get(key: encoded_key, return_cas: true, base64: base64, quiet: true)
+      end
+
+      def gat(key, ttl, options = nil)
+        ttl = TtlSanitizer.sanitize(ttl)
+        encoded_key, base64 = KeyRegularizer.encode(key)
+        req = RequestFormatter.meta_get(key: encoded_key, ttl: ttl, base64: base64)
+        write(req)
+        response_processor.meta_get_with_value(cache_nils: cache_nils?(options))
+      end
+
+      def touch(key, ttl)
+        ttl = TtlSanitizer.sanitize(ttl)
+        encoded_key, base64 = KeyRegularizer.encode(key)
+        req = RequestFormatter.meta_get(key: encoded_key, ttl: ttl, value: false, base64: base64)
+        write(req)
+        response_processor.meta_get_without_value
+      end
+
+      # TODO: This is confusing, as there's a cas command in memcached
+      # and this isn't it.  Maybe rename?  Maybe eliminate?
+      def cas(key)
+        encoded_key, base64 = KeyRegularizer.encode(key)
+        req = RequestFormatter.meta_get(key: encoded_key, value: true, return_cas: true, base64: base64)
+        write(req)
+        response_processor.meta_get_with_value_and_cas
+      end
+
+      # Storage Commands
+      def set(key, value, ttl, cas, options)
+        write_storage_req(:set, key, value, ttl, cas, options)
+        response_processor.meta_set_with_cas unless quiet?
+      end
+
+      def add(key, value, ttl, options)
+        write_storage_req(:add, key, value, ttl, nil, options)
+        response_processor.meta_set_with_cas unless quiet?
+      end
+
+      def replace(key, value, ttl, cas, options)
+        write_storage_req(:replace, key, value, ttl, cas, options)
+        response_processor.meta_set_with_cas unless quiet?
+      end
+
+      # rubocop:disable Metrics/ParameterLists
+      def write_storage_req(mode, key, raw_value, ttl = nil, cas = nil, options = {})
+        (value, bitflags) = @value_marshaller.store(key, raw_value, options)
+        ttl = TtlSanitizer.sanitize(ttl) if ttl
+        encoded_key, base64 = KeyRegularizer.encode(key)
+        req = RequestFormatter.meta_set(key: encoded_key, value: value,
+                                        bitflags: bitflags, cas: cas,
+                                        ttl: ttl, mode: mode, quiet: quiet?, base64: base64)
+        write(req)
+      end
+      # rubocop:enable Metrics/ParameterLists
+
+      def append(key, value)
+        write_append_prepend_req(:append, key, value)
+        response_processor.meta_set_append_prepend unless quiet?
+      end
+
+      def prepend(key, value)
+        write_append_prepend_req(:prepend, key, value)
+        response_processor.meta_set_append_prepend unless quiet?
+      end
+
+      # rubocop:disable Metrics/ParameterLists
+      def write_append_prepend_req(mode, key, value, ttl = nil, cas = nil, _options = {})
+        ttl = TtlSanitizer.sanitize(ttl) if ttl
+        encoded_key, base64 = KeyRegularizer.encode(key)
+        req = RequestFormatter.meta_set(key: encoded_key, value: value, base64: base64,
+                                        cas: cas, ttl: ttl, mode: mode, quiet: quiet?)
+        write(req)
+      end
+      # rubocop:enable Metrics/ParameterLists
+
+      # Delete Commands
+      def delete(key, cas)
+        encoded_key, base64 = KeyRegularizer.encode(key)
+        req = RequestFormatter.meta_delete(key: encoded_key, cas: cas,
+                                           base64: base64, quiet: quiet?)
+        write(req)
+        response_processor.meta_delete unless quiet?
+      end
+
+      # Arithmetic Commands
+      def decr(key, count, ttl, initial)
+        decr_incr false, key, count, ttl, initial
+      end
+
+      def incr(key, count, ttl, initial)
+        decr_incr true, key, count, ttl, initial
+      end
+
+      def decr_incr(incr, key, delta, ttl, initial)
+        ttl = initial ? TtlSanitizer.sanitize(ttl) : nil # Only set a TTL if we want to set a value on miss
+        encoded_key, base64 = KeyRegularizer.encode(key)
+        write(RequestFormatter.meta_arithmetic(key: encoded_key, delta: delta, initial: initial, incr: incr, ttl: ttl,
+                                               quiet: quiet?, base64: base64))
+        response_processor.decr_incr unless quiet?
+      end
+
+      # Other Commands
+      def flush(delay = 0)
+        write(RequestFormatter.flush(delay: delay))
+        response_processor.flush unless quiet?
+      end
+
+      # Noop is a keepalive operation but also used to demarcate the end of a set of pipelined commands.
+      # We need to read all the responses at once.
+      def noop
+        write_noop
+        response_processor.consume_all_responses_until_mn
+      end
+
+      def stats(info = nil)
+        write(RequestFormatter.stats(info))
+        response_processor.stats
+      end
+
+      def reset_stats
+        write(RequestFormatter.stats('reset'))
+        response_processor.reset
+      end
+
+      def version
+        write(RequestFormatter.version)
+        response_processor.version
+      end
+
+      def write_noop
+        write(RequestFormatter.meta_noop)
+      end
+
+      def authenticate_connection
+        raise Dalli::DalliError, 'Authentication not supported for the meta protocol.'
+      end
+
+      require_relative 'meta/key_regularizer'
+      require_relative 'meta/request_formatter'
+      require_relative 'meta/response_processor'
+    end
+  end
+end
diff --git a/lib/dalli/protocol/meta/key_regularizer.rb b/lib/dalli/protocol/meta/key_regularizer.rb
new file mode 100644
index 0000000..56b1860
--- /dev/null
+++ b/lib/dalli/protocol/meta/key_regularizer.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module Dalli
+  module Protocol
+    class Meta
+      ##
+      # The meta protocol requires that keys be ASCII only, so Unicode keys are
+      # not supported.  In addition, the use of whitespace in the key is not
+      # allowed.
+      # memcached supports base64-encoded keys for keys containing whitespace
+      # or non-ASCII characters, provided the 'b' flag is included in the request.
+      class KeyRegularizer
+        WHITESPACE = /\s/.freeze
+
+        def self.encode(key)
+          return [key, false] if key.ascii_only? && !WHITESPACE.match(key)
+
+          strict_base64_encoded = [key].pack('m0')
+          [strict_base64_encoded, true]
+        end
+
+        def self.decode(encoded_key, base64_encoded)
+          return encoded_key unless base64_encoded
+
+          strict_base64_decoded = encoded_key.unpack1('m0')
+          strict_base64_decoded.force_encoding(Encoding::UTF_8)
+        end
+      end
+    end
+  end
+end
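
A standalone sketch of the key handling above (plain Ruby, not the library class): ASCII-only
keys without whitespace pass through untouched, anything else is strict-base64 encoded and
flagged so the request carries the meta protocol's 'b' flag.

    def encode(key)
      return [key, false] if key.ascii_only? && !/\s/.match(key)

      [[key].pack('m0'), true]            # strict base64, no trailing newline
    end

    encode('plain_key')                   # => ["plain_key", false]
    encoded, base64 = encode('cache key with spaces')
    base64                                # => true
    encoded.unpack1('m0').force_encoding(Encoding::UTF_8)
    # => "cache key with spaces"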
diff --git a/lib/dalli/protocol/meta/request_formatter.rb b/lib/dalli/protocol/meta/request_formatter.rb
new file mode 100644
index 0000000..7e485fe
--- /dev/null
+++ b/lib/dalli/protocol/meta/request_formatter.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: false
+
+module Dalli
+  module Protocol
+    class Meta
+      ##
+      # Class that encapsulates logic for formatting meta protocol requests
+      # to memcached.
+      ##
+      class RequestFormatter
+        # Since these are string construction methods, we're going to disable these
+        # Rubocop cops.  We really can't make this construction much simpler,
+        # and introducing an intermediate object seems like overkill.
+        #
+        # rubocop:disable Metrics/CyclomaticComplexity
+        # rubocop:disable Metrics/MethodLength
+        # rubocop:disable Metrics/ParameterLists
+        # rubocop:disable Metrics/PerceivedComplexity
+        def self.meta_get(key:, value: true, return_cas: false, ttl: nil, base64: false, quiet: false)
+          cmd = "mg #{key}"
+          cmd << ' v f' if value
+          cmd << ' c' if return_cas
+          cmd << ' b' if base64
+          cmd << " T#{ttl}" if ttl
+          cmd << ' k q s' if quiet # Return the key in the response if quiet
+          cmd + TERMINATOR
+        end
+
+        def self.meta_set(key:, value:, bitflags: nil, cas: nil, ttl: nil, mode: :set, base64: false, quiet: false)
+          cmd = "ms #{key} #{value.bytesize}"
+          cmd << ' c' unless %i[append prepend].include?(mode)
+          cmd << ' b' if base64
+          cmd << " F#{bitflags}" if bitflags
+          cmd << cas_string(cas)
+          cmd << " T#{ttl}" if ttl
+          cmd << " M#{mode_to_token(mode)}"
+          cmd << ' q' if quiet
+          cmd << TERMINATOR
+          cmd << value
+          cmd + TERMINATOR
+        end
+
+        def self.meta_delete(key:, cas: nil, ttl: nil, base64: false, quiet: false)
+          cmd = "md #{key}"
+          cmd << ' b' if base64
+          cmd << cas_string(cas)
+          cmd << " T#{ttl}" if ttl
+          cmd << ' q' if quiet
+          cmd + TERMINATOR
+        end
+
+        def self.meta_arithmetic(key:, delta:, initial:, incr: true, cas: nil, ttl: nil, base64: false, quiet: false)
+          cmd = "ma #{key} v"
+          cmd << ' b' if base64
+          cmd << " D#{delta}" if delta
+          cmd << " J#{initial}" if initial
+          # Always set a TTL if an initial value is specified
+          cmd << " N#{ttl || 0}" if ttl || initial
+          cmd << cas_string(cas)
+          cmd << ' q' if quiet
+          cmd << " M#{incr ? 'I' : 'D'}"
+          cmd + TERMINATOR
+        end
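+
+        # Illustrative request string, assuming TERMINATOR is "\r\n":
+        #   meta_arithmetic(key: 'counter', delta: 1, initial: 0)
+        #   # => "ma counter v D1 J0 N0 MI\r\n"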
+        # rubocop:enable Metrics/CyclomaticComplexity
+        # rubocop:enable Metrics/MethodLength
+        # rubocop:enable Metrics/ParameterLists
+        # rubocop:enable Metrics/PerceivedComplexity
+
+        def self.meta_noop
+          "mn#{TERMINATOR}"
+        end
+
+        def self.version
+          "version#{TERMINATOR}"
+        end
+
+        def self.flush(delay: nil, quiet: false)
+          cmd = +'flush_all'
+          cmd << " #{parse_to_64_bit_int(delay, 0)}" if delay
+          cmd << ' noreply' if quiet
+          cmd + TERMINATOR
+        end
+
+        def self.stats(arg = nil)
+          cmd = +'stats'
+          cmd << " #{arg}" if arg
+          cmd + TERMINATOR
+        end
+
+        # rubocop:disable Metrics/MethodLength
+        def self.mode_to_token(mode)
+          case mode
+          when :add
+            'E'
+          when :replace
+            'R'
+          when :append
+            'A'
+          when :prepend
+            'P'
+          else
+            'S'
+          end
+        end
+        # rubocop:enable Metrics/MethodLength
+
+        def self.cas_string(cas)
+          cas = parse_to_64_bit_int(cas, nil)
+          cas.nil? || cas.zero? ? '' : " C#{cas}"
+        end
+
+        def self.parse_to_64_bit_int(val, default)
+          val.nil? ? nil : Integer(val)
+        rescue ArgumentError
+          # Sanitize to default if it isn't parsable as an integer
+          default
+        end
+      end
+    end
+  end
+end
diff --git a/lib/dalli/protocol/meta/response_processor.rb b/lib/dalli/protocol/meta/response_processor.rb
new file mode 100644
index 0000000..be35b5d
--- /dev/null
+++ b/lib/dalli/protocol/meta/response_processor.rb
@@ -0,0 +1,211 @@
+# frozen_string_literal: true
+
+module Dalli
+  module Protocol
+    class Meta
+      ##
+      # Class that encapsulates logic for processing meta protocol responses
+      # from memcached.  Includes logic for pulling data from an IO source
+      # and parsing into local values.  Handles errors on unexpected values.
+      ##
+      class ResponseProcessor
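+        # The constants below are the status codes returned by memcached for
+        # meta (and a few classic text) commands; per the upstream protocol
+        # docs: HD = success, EN = cache miss, NF = not found, NS = not
+        # stored, EX = CAS mismatch, VA = value follows, MN = no-op marker
+        # that terminates a pipelined sequence of responses.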
+        EN = 'EN'
+        END_TOKEN = 'END'
+        EX = 'EX'
+        HD = 'HD'
+        MN = 'MN'
+        NF = 'NF'
+        NS = 'NS'
+        OK = 'OK'
+        RESET = 'RESET'
+        STAT = 'STAT'
+        VA = 'VA'
+        VERSION = 'VERSION'
+
+        def initialize(io_source, value_marshaller)
+          @io_source = io_source
+          @value_marshaller = value_marshaller
+        end
+
+        def meta_get_with_value(cache_nils: false)
+          tokens = error_on_unexpected!([VA, EN, HD])
+          return cache_nils ? ::Dalli::NOT_FOUND : nil if tokens.first == EN
+          return true unless tokens.first == VA
+
+          @value_marshaller.retrieve(read_line, bitflags_from_tokens(tokens))
+        end
+
+        def meta_get_with_value_and_cas
+          tokens = error_on_unexpected!([VA, EN, HD])
+          return [nil, 0] if tokens.first == EN
+
+          cas = cas_from_tokens(tokens)
+          return [nil, cas] unless tokens.first == VA
+
+          [@value_marshaller.retrieve(read_line, bitflags_from_tokens(tokens)), cas]
+        end
+
+        def meta_get_without_value
+          tokens = error_on_unexpected!([EN, HD])
+          tokens.first == EN ? nil : true
+        end
+
+        def meta_set_with_cas
+          tokens = error_on_unexpected!([HD, NS, NF, EX])
+          return false unless tokens.first == HD
+
+          cas_from_tokens(tokens)
+        end
+
+        def meta_set_append_prepend
+          tokens = error_on_unexpected!([HD, NS, NF, EX])
+          return false unless tokens.first == HD
+
+          true
+        end
+
+        def meta_delete
+          tokens = error_on_unexpected!([HD, NF, EX])
+          tokens.first == HD
+        end
+
+        def decr_incr
+          tokens = error_on_unexpected!([VA, NF, NS, EX])
+          return false if [NS, EX].include?(tokens.first)
+          return nil if tokens.first == NF
+
+          read_line.to_i
+        end
+
+        def stats
+          tokens = error_on_unexpected!([END_TOKEN, STAT])
+          values = {}
+          while tokens.first != END_TOKEN
+            values[tokens[1]] = tokens[2]
+            tokens = next_line_to_tokens
+          end
+          values
+        end
+
+        def flush
+          error_on_unexpected!([OK])
+
+          true
+        end
+
+        def reset
+          error_on_unexpected!([RESET])
+
+          true
+        end
+
+        def version
+          tokens = error_on_unexpected!([VERSION])
+          tokens.last
+        end
+
+        def consume_all_responses_until_mn
+          tokens = next_line_to_tokens
+
+          tokens = next_line_to_tokens while tokens.first != MN
+          true
+        end
+
+        def tokens_from_header_buffer(buf)
+          header = header_from_buffer(buf)
+          tokens = header.split
+          header_len = header.bytesize + TERMINATOR.length
+          body_len = body_len_from_tokens(tokens)
+          [tokens, header_len, body_len]
+        end
+
+        def full_response_from_buffer(tokens, body, resp_size)
+          value = @value_marshaller.retrieve(body, bitflags_from_tokens(tokens))
+          [resp_size, tokens.first == VA, cas_from_tokens(tokens), key_from_tokens(tokens), value]
+        end
+
+        ##
+        # This method returns an array of values used in a pipelined
+        # getk process.  The first value is the number of bytes by
+        # which to advance the pointer in the buffer.  If the
+        # complete response is found in the buffer, this will
+        # be the response size.  Otherwise it is zero.
+        #
+        # The remaining four values are whether a value was returned (true
+        # when the status is VA), the CAS, the key, and the value.
+        ##
+        def getk_response_from_buffer(buf)
+          # There's no header in the buffer, so don't advance
+          return [0, nil, nil, nil, nil] unless contains_header?(buf)
+
+          tokens, header_len, body_len = tokens_from_header_buffer(buf)
+
+          # We have a complete response that has no body.
+          # This is either the response to the terminating
+          # noop or, if the status is not MN, an intermediate
+          # error response that needs to be discarded.
+          return [header_len, true, nil, nil, nil] if body_len.zero?
+
+          resp_size = header_len + body_len + TERMINATOR.length
+          # The header is in the buffer, but the body is not.  As we don't have
+          # a complete response, don't advance the buffer
+          return [0, nil, nil, nil, nil] unless buf.bytesize >= resp_size
+
+          # The full response is in our buffer, so parse it and return
+          # the values
+          body = buf.slice(header_len, body_len)
+          full_response_from_buffer(tokens, body, resp_size)
+        end
+
+        def contains_header?(buf)
+          buf.include?(TERMINATOR)
+        end
+
+        def header_from_buffer(buf)
+          buf.split(TERMINATOR, 2).first
+        end
+
+        def error_on_unexpected!(expected_codes)
+          tokens = next_line_to_tokens
+          raise Dalli::DalliError, "Response error: #{tokens.first}" unless expected_codes.include?(tokens.first)
+
+          tokens
+        end
+
+        def bitflags_from_tokens(tokens)
+          value_from_tokens(tokens, 'f')&.to_i
+        end
+
+        def cas_from_tokens(tokens)
+          value_from_tokens(tokens, 'c')&.to_i
+        end
+
+        def key_from_tokens(tokens)
+          encoded_key = value_from_tokens(tokens, 'k')
+          base64_encoded = tokens.any?('b')
+          KeyRegularizer.decode(encoded_key, base64_encoded)
+        end
+
+        def body_len_from_tokens(tokens)
+          value_from_tokens(tokens, 's')&.to_i
+        end
+
+        def value_from_tokens(tokens, flag)
+          bitflags_token = tokens.find { |t| t.start_with?(flag) }
+          return 0 unless bitflags_token
+
+          bitflags_token[1..]
+        end
+
+        def read_line
+          @io_source.read_line&.chomp!(TERMINATOR)
+        end
+
+        def next_line_to_tokens
+          line = read_line
+          line&.split || []
+        end
+      end
+    end
+  end
+end
diff --git a/lib/dalli/protocol/response_buffer.rb b/lib/dalli/protocol/response_buffer.rb
new file mode 100644
index 0000000..6fcd648
--- /dev/null
+++ b/lib/dalli/protocol/response_buffer.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'socket'
+require 'timeout'
+
+module Dalli
+  module Protocol
+    ##
+    # Manages the buffer for responses from memcached.
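+    # A sketch of how the pipelined getter drives it (not a strict contract):
+    # reset the buffer, then alternate read and process_single_getk_response
+    # until the terminating no-op is seen, and finally clear.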
+    ##
+    class ResponseBuffer
+      def initialize(io_source, response_processor)
+        @io_source = io_source
+        @response_processor = response_processor
+        @buffer = nil
+      end
+
+      def read
+        @buffer << @io_source.read_nonblock
+      end
+
+      # Attempts to process a single getk response from the buffer,
+      # advancing the buffer past the bytes consumed by that response.
+      def process_single_getk_response
+        bytes, status, cas, key, value = @response_processor.getk_response_from_buffer(@buffer)
+        advance(bytes)
+        [status, cas, key, value]
+      end
+
+      # Advances the internal response buffer by bytes_to_advance
+      # bytes, discarding data that has already been processed.
+      def advance(bytes_to_advance)
+        return unless bytes_to_advance.positive?
+
+        @buffer = @buffer.byteslice(bytes_to_advance..-1)
+      end
+
+      # Resets the internal buffer to an empty state,
+      # so that we're ready to read pipelined responses
+      def reset
+        @buffer = ''.b
+      end
+
+      # Clear the internal response buffer
+      def clear
+        @buffer = nil
+      end
+
+      def in_progress?
+        !@buffer.nil?
+      end
+    end
+  end
+end
diff --git a/lib/dalli/protocol/server_config_parser.rb b/lib/dalli/protocol/server_config_parser.rb
index 455e1f2..7601fce 100644
--- a/lib/dalli/protocol/server_config_parser.rb
+++ b/lib/dalli/protocol/server_config_parser.rb
@@ -1,5 +1,7 @@
 # frozen_string_literal: true
 
+require 'uri'
+
 module Dalli
   module Protocol
     ##
@@ -19,22 +21,22 @@ module Dalli
       DEFAULT_PORT = 11_211
       DEFAULT_WEIGHT = 1
 
-      def self.parse(str, client_options)
-        return parse_non_uri(str, client_options) unless str.start_with?(MEMCACHED_URI_PROTOCOL)
+      def self.parse(str)
+        return parse_non_uri(str) unless str.start_with?(MEMCACHED_URI_PROTOCOL)
 
-        parse_uri(str, client_options)
+        parse_uri(str)
       end
 
-      def self.parse_uri(str, client_options)
+      def self.parse_uri(str)
         uri = URI.parse(str)
         auth_details = {
           username: uri.user,
           password: uri.password
         }
-        [uri.host, normalize_port(uri.port), DEFAULT_WEIGHT, :tcp, client_options.merge(auth_details)]
+        [uri.host, normalize_port(uri.port), :tcp, DEFAULT_WEIGHT, auth_details]
       end
 
-      def self.parse_non_uri(str, client_options)
+      def self.parse_non_uri(str)
         res = deconstruct_string(str)
 
         hostname = normalize_host_from_match(str, res)
@@ -45,7 +47,7 @@ module Dalli
           socket_type = :tcp
           port, weight = attributes_for_tcp_socket(res)
         end
-        [hostname, port, weight, socket_type, client_options]
+        [hostname, port, socket_type, weight, {}]
       end
 
       def self.deconstruct_string(str)
diff --git a/lib/dalli/ring.rb b/lib/dalli/ring.rb
index ff67217..5792304 100644
--- a/lib/dalli/ring.rb
+++ b/lib/dalli/ring.rb
@@ -23,8 +23,10 @@ module Dalli
 
     attr_accessor :servers, :continuum
 
-    def initialize(servers, options)
-      @servers = servers
+    def initialize(servers_arg, protocol_implementation, options)
+      @servers = servers_arg.map do |s|
+        protocol_implementation.new(s, options)
+      end
       @continuum = nil
       @continuum = build_continuum(servers) if servers.size > 1
 
@@ -79,7 +81,7 @@ module Dalli
       end
     end
 
-    def flush_multi_responses
+    def pipeline_consume_and_ignore_responses
       @servers.each do |s|
         s.request(:noop)
       rescue Dalli::NetworkError
@@ -92,6 +94,10 @@ module Dalli
       @servers.first.socket_timeout
     end
 
+    def close
+      @servers.each(&:close)
+    end
+
     private
 
     def threadsafe!
diff --git a/lib/dalli/servers_arg_normalizer.rb b/lib/dalli/servers_arg_normalizer.rb
index e355d75..7ce44ca 100644
--- a/lib/dalli/servers_arg_normalizer.rb
+++ b/lib/dalli/servers_arg_normalizer.rb
@@ -40,7 +40,7 @@ module Dalli
     def self.apply_defaults(arg)
       return arg unless arg.nil?
 
-      ENV[ENV_VAR_NAME] || DEFAULT_SERVERS
+      ENV.fetch(ENV_VAR_NAME, nil) || DEFAULT_SERVERS
     end
 
     def self.validate_type(arg)
diff --git a/lib/dalli/socket.rb b/lib/dalli/socket.rb
index 72ce453..bf25fb3 100644
--- a/lib/dalli/socket.rb
+++ b/lib/dalli/socket.rb
@@ -13,7 +13,7 @@ module Dalli
     ##
     module InstanceMethods
       def readfull(count)
-        value = +''
+        value = String.new(capacity: count + 1)
         loop do
           result = read_nonblock(count - value.bytesize, exception: false)
           value << result if append_to_buffer?(result)
@@ -85,24 +85,51 @@ module Dalli
     ##
     class TCP < TCPSocket
       include Dalli::Socket::InstanceMethods
-      attr_accessor :options, :server
+      # options - supports enhanced logging in the case of a timeout
+      attr_accessor :options
 
-      def self.open(host, port, server, options = {})
-        Timeout.timeout(options[:socket_timeout]) do
-          sock = new(host, port)
+      def self.open(host, port, options = {})
+        create_socket_with_timeout(host, port, options) do |sock|
           sock.options = { host: host, port: port }.merge(options)
-          sock.server = server
           init_socket_options(sock, options)
 
           options[:ssl_context] ? wrapping_ssl_socket(sock, host, options[:ssl_context]) : sock
         end
       end
 
+      def self.create_socket_with_timeout(host, port, options)
+        # Check that TCPSocket#initialize was not overwritten by resolv-replace gem
+        # (part of ruby standard library since 3.0.0, should be removed in 3.4.0),
+        # as it does not handle keyword arguments correctly.
+        # To check this we are using the fact that resolv-replace
+        # aliases TCPSocket#initialize method to #original_resolv_initialize.
+        # https://github.com/ruby/resolv-replace/blob/v0.1.1/lib/resolv-replace.rb#L21
+        if RUBY_VERSION >= '3.0' &&
+           !::TCPSocket.private_instance_methods.include?(:original_resolv_initialize)
+          sock = new(host, port, connect_timeout: options[:socket_timeout])
+          yield(sock)
+        else
+          Timeout.timeout(options[:socket_timeout]) do
+            sock = new(host, port)
+            yield(sock)
+          end
+        end
+      end
+
       def self.init_socket_options(sock, options)
         sock.setsockopt(::Socket::IPPROTO_TCP, ::Socket::TCP_NODELAY, true)
         sock.setsockopt(::Socket::SOL_SOCKET, ::Socket::SO_KEEPALIVE, true) if options[:keepalive]
         sock.setsockopt(::Socket::SOL_SOCKET, ::Socket::SO_RCVBUF, options[:rcvbuf]) if options[:rcvbuf]
         sock.setsockopt(::Socket::SOL_SOCKET, ::Socket::SO_SNDBUF, options[:sndbuf]) if options[:sndbuf]
+
+        return unless options[:socket_timeout]
+
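+        # SO_RCVTIMEO/SO_SNDTIMEO expect a struct timeval: two native longs
+        # (seconds, microseconds), hence the pack('l_2') format below.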
+        seconds, fractional = options[:socket_timeout].divmod(1)
+        microseconds = fractional * 1_000_000
+        timeval = [seconds, microseconds].pack('l_2')
+
+        sock.setsockopt(::Socket::SOL_SOCKET, ::Socket::SO_RCVTIMEO, timeval)
+        sock.setsockopt(::Socket::SOL_SOCKET, ::Socket::SO_SNDTIMEO, timeval)
       end
 
       def self.wrapping_ssl_socket(tcp_socket, host, ssl_context)
@@ -132,13 +159,15 @@ module Dalli
       ##
       class UNIX < UNIXSocket
         include Dalli::Socket::InstanceMethods
-        attr_accessor :options, :server
 
-        def self.open(path, server, options = {})
+        # options - supports enhanced logging in the case of a timeout
+        attr_accessor :options
+
+        def self.open(path, options = {})
           Timeout.timeout(options[:socket_timeout]) do
             sock = new(path)
             sock.options = { path: path }.merge(options)
-            sock.server = server
             sock
           end
         end
diff --git a/lib/dalli/version.rb b/lib/dalli/version.rb
index 1c3fff4..dd6d1d8 100644
--- a/lib/dalli/version.rb
+++ b/lib/dalli/version.rb
@@ -1,7 +1,7 @@
 # frozen_string_literal: true
 
 module Dalli
-  VERSION = '3.0.6'
+  VERSION = '3.2.8'
 
   MIN_SUPPORTED_MEMCACHED_VERSION = '1.4'
 end
diff --git a/lib/rack/session/dalli.rb b/lib/rack/session/dalli.rb
index d0c3f78..a73bef9 100644
--- a/lib/rack/session/dalli.rb
+++ b/lib/rack/session/dalli.rb
@@ -8,14 +8,13 @@ require 'English'
 module Rack
   module Session
     # Rack::Session::Dalli provides memcached based session management.
-    class Dalli < Abstract::Persisted
-      attr_reader :pool
+    class Dalli < Abstract::PersistedSecure
+      attr_reader :data
 
       # Don't freeze this until we fix the specs/implementation
       # rubocop:disable Style/MutableConstant
       DEFAULT_DALLI_OPTIONS = {
-        namespace: 'rack:session',
-        memcache_server: 'localhost:11211'
+        namespace: 'rack:session'
       }
       # rubocop:enable Style/MutableConstant
 
@@ -33,25 +32,14 @@ module Rack
       # ENV['MEMCACHE_SERVERS'] and use that value if it is available, or fall
       # back to the same default behavior described above.
       #
-      # Rack::Session::Dalli is intended to be a drop-in replacement for
-      # Rack::Session::Memcache. It accepts additional options that control the
-      # behavior of Rack::Session, Dalli::Client, and an optional
-      # ConnectionPool. First and foremost, if you wish to instantiate your own
-      # Dalli::Client (or ConnectionPool) and use that instead of letting
-      # Rack::Session::Dalli instantiate it on your behalf, simply pass it in
-      # as the `:cache` option. Please note that you will be responsible for
-      # setting the namespace and any other options on Dalli::Client.
+      # Rack::Session::Dalli accepts the same options as Dalli::Client, so
+      # it's worth reviewing its documentation. Perhaps most importantly,
+      # if you don't specify a `:namespace` option, Rack::Session::Dalli
+      # will default to using 'rack:session'.
       #
-      # Secondly, if you're not using the `:cache` option, Rack::Session::Dalli
-      # accepts the same options as Dalli::Client, so it's worth reviewing its
-      # documentation. Perhaps most importantly, if you don't specify a
-      # `:namespace` option, Rack::Session::Dalli will default to using
-      # "rack:session".
-      #
-      # Whether you are using the `:cache` option or not, it is not recommend
-      # to set `:expires_in`. Instead, use `:expire_after`, which will control
-      # both the expiration of the client cookie as well as the expiration of
-      # the corresponding entry in memcached.
+      # It is not recommended to set `:expires_in`. Instead, use `:expire_after`,
+      # which will control both the expiration of the client cookie as well
+      # as the expiration of the corresponding entry in memcached.
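+      #
+      # An illustrative configuration (option names as handled in this file;
+      # the server list and values are examples only):
+      #
+      #   use Rack::Session::Dalli,
+      #       memcache_server: 'cache-1:11211,cache-2:11211',
+      #       namespace: 'myapp:session',
+      #       expire_after: 2592000,
+      #       pool_size: 10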
       #
       # Rack::Session::Dalli also accepts a host of options that control how
       # the sessions and session cookies are managed, including the
@@ -78,95 +66,125 @@ module Rack
         super
 
         # Determine the default TTL for newly-created sessions
-        @default_ttl = ttl @default_options[:expire_after]
-
-        # Normalize and validate passed options
-        mserv, mopts, popts = extract_dalli_options(options)
-
-        @pool = ConnectionPool.new(popts || {}) { ::Dalli::Client.new(mserv, mopts) }
+        @default_ttl = ttl(@default_options[:expire_after])
+        @data = build_data_source(options)
       end
 
-      def get_session(_env, sid)
-        with_block([nil, {}]) do |dc|
-          unless sid && !sid.empty? && (session = dc.get(sid))
-            old_sid = sid
-            sid = generate_sid_with(dc)
-            session = {}
-            unless dc.add(sid, session, @default_ttl)
-              sid = old_sid
-              redo # generate a new sid and try again
-            end
-          end
-          [sid, session]
+      def find_session(_req, sid)
+        with_dalli_client([nil, {}]) do |dc|
+          existing_session = existing_session_for_sid(dc, sid)
+          return [sid, existing_session] unless existing_session.nil?
+
+          [create_sid_with_empty_session(dc), {}]
         end
       end
 
-      def set_session(_env, session_id, new_session, options)
-        return false unless session_id
+      def write_session(_req, sid, session, options)
+        return false unless sid
 
-        with_block(false) do |dc|
-          dc.set(session_id, new_session, ttl(options[:expire_after]))
-          session_id
+        key = memcached_key_from_sid(sid)
+        return false unless key
+
+        with_dalli_client(false) do |dc|
+          dc.set(memcached_key_from_sid(sid), session, ttl(options[:expire_after]))
+          sid
         end
       end
 
-      def destroy_session(_env, session_id, options)
-        with_block do |dc|
-          dc.delete(session_id)
+      def delete_session(_req, sid, options)
+        with_dalli_client do |dc|
+          key = memcached_key_from_sid(sid)
+          dc.delete(key) if key
           generate_sid_with(dc) unless options[:drop]
         end
       end
 
-      def find_session(req, sid)
-        get_session req.env, sid
+      private
+
+      def memcached_key_from_sid(sid)
+        sid.private_id if sid.respond_to?(:private_id)
       end
 
-      def write_session(req, sid, session, options)
-        set_session req.env, sid, session, options
+      def existing_session_for_sid(client, sid)
+        return nil unless sid && !sid.empty?
+
+        key = memcached_key_from_sid(sid)
+        return nil if key.nil?
+
+        client.get(key)
       end
 
-      def delete_session(req, sid, options)
-        destroy_session req.env, sid, options
+      def create_sid_with_empty_session(client)
+        loop do
+          sid = generate_sid_with(client)
+          key = memcached_key_from_sid(sid)
+
+          break sid if key && client.add(key, {}, @default_ttl)
+        end
       end
 
-      private
+      def generate_sid_with(client)
+        loop do
+          raw_sid = generate_sid
+          sid = raw_sid.is_a?(String) ? Rack::Session::SessionId.new(raw_sid) : raw_sid
+          key = memcached_key_from_sid(sid)
+          break sid unless key && client.get(key)
+        end
+      end
+
+      def build_data_source(options)
+        server_configurations, client_options, pool_options = extract_dalli_options(options)
+
+        if pool_options.empty?
+          ::Dalli::Client.new(server_configurations, client_options)
+        else
+          ensure_connection_pool_added!
+          ConnectionPool.new(pool_options) do
+            ::Dalli::Client.new(server_configurations, client_options.merge(threadsafe: false))
+          end
+        end
+      end
 
       def extract_dalli_options(options)
         raise 'Rack::Session::Dalli no longer supports the :cache option.' if options[:cache]
 
-        # Filter out Rack::Session-specific options and apply our defaults
+        client_options = retrieve_client_options(options)
+        server_configurations = client_options.delete(:memcache_server)
+
+        [server_configurations, client_options, retrieve_pool_options(options)]
+      end
+
+      def retrieve_client_options(options)
         # Filter out Rack::Session-specific options and apply our defaults
         filtered_opts = options.reject { |k, _| DEFAULT_OPTIONS.key? k }
-        mopts = DEFAULT_DALLI_OPTIONS.merge(filtered_opts)
-        mserv = mopts.delete :memcache_server
-
-        popts = {}
-        if mopts[:pool_size] || mopts[:pool_timeout]
-          popts[:size] = mopts.delete :pool_size if mopts[:pool_size]
-          popts[:timeout] = mopts.delete :pool_timeout if mopts[:pool_timeout]
-          mopts[:threadsafe] = true
-        end
-
-        [mserv, mopts, popts]
+        DEFAULT_DALLI_OPTIONS.merge(filtered_opts)
       end
 
-      def generate_sid_with(client)
-        loop do
-          sid = generate_sid
-          break sid unless client.get(sid)
+      def retrieve_pool_options(options)
+        {}.tap do |pool_options|
+          pool_options[:size] = options.delete(:pool_size) if options[:pool_size]
+          pool_options[:timeout] = options.delete(:pool_timeout) if options[:pool_timeout]
         end
       end
 
-      def with_block(default = nil, &block)
-        @pool.with(&block)
+      def ensure_connection_pool_added!
+        require 'connection_pool'
+      rescue LoadError => e
+        warn "You don't have connection_pool installed in your application. " \
+             'Please add it to your Gemfile and run bundle install'
+        raise e
+      end
+
+      def with_dalli_client(result_on_error = nil, &block)
+        @data.with(&block)
       rescue ::Dalli::DalliError, Errno::ECONNREFUSED
-        raise if /undefined class/.match?($ERROR_INFO.message)
+        raise if $ERROR_INFO.message.include?('undefined class')
 
         if $VERBOSE
           warn "#{self} is unable to find memcached server."
           warn $ERROR_INFO.inspect
         end
-        default
+        result_on_error
       end
 
       def ttl(expire_after)
diff --git a/scripts/install_memcached.sh b/scripts/install_memcached.sh
index 067b1a1..8930371 100644
--- a/scripts/install_memcached.sh
+++ b/scripts/install_memcached.sh
@@ -2,14 +2,28 @@
 
 version=$MEMCACHED_VERSION
 
-echo Installing Memcached version ${version}
 
 sudo apt-get -y remove memcached
-sudo apt-get install libevent-dev
-sudo apt-get install libsasl2-dev
+sudo apt-get install libevent-dev libsasl2-dev sasl2-bin
+
+echo Installing Memcached version ${version}
+
+# Install memcached with SASL and TLS support
 wget https://memcached.org/files/memcached-${version}.tar.gz
 tar -zxvf memcached-${version}.tar.gz
 cd memcached-${version}
 ./configure --enable-sasl --enable-tls
 make
 sudo mv memcached /usr/local/bin/
+
+echo Memcached version ${version} installation complete
+
+echo Configuring SASL
+
+# Create SASL credentials for testing
+echo 'mech_list: plain' | sudo tee -a /usr/lib/sasl2/memcached.conf > /dev/null
+
+echo testtest | sudo saslpasswd2 -a memcached -c testuser -p
+sudo chmod 644 /etc/sasldb2
+
+echo SASL configuration complete
diff --git a/test/benchmark_test.rb b/test/benchmark_test.rb
index decee62..c388034 100644
--- a/test/benchmark_test.rb
+++ b/test/benchmark_test.rb
@@ -36,12 +36,13 @@ describe 'performance' do
   end
 
   it 'runs benchmarks' do
-    memcached(@port) do
+    protocol = :binary
+    memcached(protocol, @port) do
       profile do
         Benchmark.bm(37) do |x|
           n = 2500
 
-          @m = Dalli::Client.new(@servers)
+          @m = Dalli::Client.new(@servers, protocol: protocol)
           x.report('set:plain:dalli') do
             n.times do
               @m.set @key1, @marshalled, 0, raw: true
@@ -53,7 +54,7 @@ describe 'performance' do
             end
           end
 
-          @m = Dalli::Client.new(@servers)
+          @m = Dalli::Client.new(@servers, protocol: protocol)
           x.report('setq:plain:dalli') do
             @m.multi do
               n.times do
@@ -67,7 +68,7 @@ describe 'performance' do
             end
           end
 
-          @m = Dalli::Client.new(@servers)
+          @m = Dalli::Client.new(@servers, protocol: protocol)
           x.report('set:ruby:dalli') do
             n.times do
               @m.set @key1, @value
@@ -79,7 +80,7 @@ describe 'performance' do
             end
           end
 
-          @m = Dalli::Client.new(@servers)
+          @m = Dalli::Client.new(@servers, protocol: protocol)
           x.report('get:plain:dalli') do
             n.times do
               @m.get @key1, raw: true
@@ -91,7 +92,7 @@ describe 'performance' do
             end
           end
 
-          @m = Dalli::Client.new(@servers)
+          @m = Dalli::Client.new(@servers, protocol: protocol)
           x.report('get:ruby:dalli') do
             n.times do
               @m.get @key1
@@ -103,7 +104,7 @@ describe 'performance' do
             end
           end
 
-          @m = Dalli::Client.new(@servers)
+          @m = Dalli::Client.new(@servers, protocol: protocol)
           x.report('multiget:ruby:dalli') do
             n.times do
               # We don't use the keys array because splat is slow
@@ -111,7 +112,7 @@ describe 'performance' do
             end
           end
 
-          @m = Dalli::Client.new(@servers)
+          @m = Dalli::Client.new(@servers, protocol: protocol)
           # rubocop:disable Lint/SuppressedException
           x.report('missing:ruby:dalli') do
             n.times do
@@ -125,7 +126,7 @@ describe 'performance' do
           end
           # rubocop:enable Lint/SuppressedException
 
-          @m = Dalli::Client.new(@servers)
+          @m = Dalli::Client.new(@servers, protocol: protocol)
           x.report('mixed:ruby:dalli') do
             n.times do
               @m.set @key1, @value
@@ -143,7 +144,7 @@ describe 'performance' do
             end
           end
 
-          @m = Dalli::Client.new(@servers)
+          @m = Dalli::Client.new(@servers, protocol: protocol)
           x.report('mixedq:ruby:dalli') do
             n.times do
               @m.multi do
@@ -172,7 +173,7 @@ describe 'performance' do
             end
           end
 
-          @m = Dalli::Client.new(@servers)
+          @m = Dalli::Client.new(@servers, protocol: protocol)
           x.report('incr:ruby:dalli') do
             counter = 'foocount'
             n.times do
diff --git a/test/helper.rb b/test/helper.rb
index 8a1abc5..7567f0f 100644
--- a/test/helper.rb
+++ b/test/helper.rb
@@ -7,7 +7,6 @@ require 'minitest/pride'
 require 'minitest/autorun'
 require_relative 'helpers/memcached'
 
-ENV['MEMCACHED_SASL_PWDB'] = "#{File.dirname(__FILE__)}/sasl/sasldb"
 ENV['SASL_CONF_PATH'] = "#{File.dirname(__FILE__)}/sasl/memcached.conf"
 
 require 'dalli'
@@ -18,26 +17,38 @@ require 'securerandom'
 Dalli.logger = Logger.new($stdout)
 Dalli.logger.level = Logger::ERROR
 
-module MiniTest
+# Check that a supported version of memcached is installed and determine the
+# supported protocols
+raise StandardError, 'No supported version of memcached could be found.' unless MemcachedManager.version
+
+# Generate self-signed certs for SSL once per suite run.
+CertificateGenerator.generate
+
+module Minitest
   class Spec
     include Memcached::Helper
 
     def assert_error(error, regexp = nil, &block)
       ex = assert_raises(error, &block)
+
       assert_match(regexp, ex.message, "#{ex.class.name}: #{ex.message}\n#{ex.backtrace.join("\n\t")}")
     end
 
+    def valid_cas?(cas)
+      cas.is_a?(Integer) && cas.positive?
+    end
+
     def op_cas_succeeds(rsp)
-      rsp.is_a?(Integer) && rsp.positive?
+      valid_cas?(rsp)
     end
 
     def op_replace_succeeds(rsp)
-      rsp.is_a?(Integer) && rsp.positive?
+      valid_cas?(rsp)
     end
 
     # add and set must have the same return value because of DalliStore#write_entry
     def op_addset_succeeds(rsp)
-      rsp.is_a?(Integer) && rsp.positive?
+      valid_cas?(rsp)
     end
 
     def with_connectionpool
diff --git a/test/helpers/memcached.rb b/test/helpers/memcached.rb
index 6c6f44e..5923ece 100644
--- a/test/helpers/memcached.rb
+++ b/test/helpers/memcached.rb
@@ -36,8 +36,8 @@ module Memcached
     # client_options - Options passed to the Dalli::Client on initialization
     # terminate_process - whether to terminate the memcached process on
     #                     exiting the block
-    def memcached(port_or_socket, args = '', client_options = {}, terminate_process: true)
-      dc = MemcachedManager.start_and_flush_with_retry(port_or_socket, args, client_options)
+    def memcached(protocol, port_or_socket, args = '', client_options = {}, terminate_process: true)
+      dc = MemcachedManager.start_and_flush_with_retry(port_or_socket, args, client_options.merge(protocol: protocol))
       yield dc, port_or_socket if block_given?
       memcached_kill(port_or_socket) if terminate_process
     end
@@ -45,14 +45,16 @@ module Memcached
     # Launches a memcached process using the memcached method in this module,
     # but sets terminate_process to false ensuring that the process persists
     # past execution of the block argument.
-    def memcached_persistent(port_or_socket = 21_345, args = '', client_options = {}, &block)
-      memcached(port_or_socket, args, client_options, terminate_process: false, &block)
+    # rubocop:disable Metrics/ParameterLists
+    def memcached_persistent(protocol = :binary, port_or_socket = 21_345, args = '', client_options = {}, &block)
+      memcached(protocol, port_or_socket, args, client_options, terminate_process: false, &block)
     end
+    # rubocop:enable Metrics/ParameterLists
 
     # Launches a persistent memcached process, configured to use SSL
-    def memcached_ssl_persistent(port_or_socket = 21_397, &block)
-      CertificateGenerator.generate
-      memcached_persistent(port_or_socket,
+    def memcached_ssl_persistent(protocol = :binary, port_or_socket = rand(21_397..21_896), &block)
+      memcached_persistent(protocol,
+                           port_or_socket,
                            CertificateGenerator.ssl_args,
                            { ssl_context: CertificateGenerator.ssl_context },
                            &block)
@@ -65,8 +67,8 @@ module Memcached
     end
 
     # Launches a persistent memcached process, configured to use SASL authentication
-    def memcached_sasl_persistent(port_or_socket = 21_397, &block)
-      memcached_persistent(port_or_socket, '-S', sasl_credentials, &block)
+    def memcached_sasl_persistent(port_or_socket = 21_398, &block)
+      memcached_persistent(:binary, port_or_socket, '-S', sasl_credentials, &block)
     end
 
     # The SASL credentials used for the test SASL server
diff --git a/test/integration/test_authentication.rb b/test/integration/test_authentication.rb
new file mode 100644
index 0000000..a0145f6
--- /dev/null
+++ b/test/integration/test_authentication.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require_relative '../helper'
+
+describe 'authentication' do
+  describe 'using the meta protocol' do
+    let(:username) { SecureRandom.hex(5) }
+    it 'raises an error if the username is set' do
+      err = assert_raises Dalli::DalliError do
+        memcached_persistent(:meta, 21_345, '', username: username) do |dc|
+          dc.flush
+          dc.set('key1', 'abcd')
+        end
+      end
+
+      assert_equal 'Authentication not supported for the meta protocol.', err.message
+    end
+  end
+end
diff --git a/test/integration/test_cas.rb b/test/integration/test_cas.rb
new file mode 100644
index 0000000..7b7fb7b
--- /dev/null
+++ b/test/integration/test_cas.rb
@@ -0,0 +1,346 @@
+# frozen_string_literal: true
+
+require_relative '../helper'
+
+describe 'CAS behavior' do
+  MemcachedManager.supported_protocols.each do |p|
+    describe "using the #{p} protocol" do
+      describe 'get_cas' do
+        describe 'when no block is given' do
+          it 'returns the value and a CAS' do
+            memcached_persistent(p) do |dc|
+              dc.flush
+
+              dc.set('key1', 'abcd')
+              value, cas = dc.get_cas('key1')
+
+              assert_equal 'abcd', value
+              assert valid_cas?(cas)
+            end
+          end
+
+          # This is historical, as the 'Not found' value was
+          # treated as a special case at one time
+          it 'allows "Not found" as value' do
+            memcached_persistent(p) do |dc|
+              dc.flush
+
+              dc.set('key1', 'Not found')
+              value, cas = dc.get_cas('key1')
+
+              assert_equal 'Not found', value
+              assert valid_cas?(cas)
+            end
+          end
+
+          it 'returns [nil, 0] on a miss' do
+            memcached_persistent(p) do |dc|
+              dc.flush
+              value, cas = dc.get_cas('key1')
+
+              assert_nil value
+              assert_equal 0, cas
+            end
+          end
+        end
+
+        describe 'when a block is given' do
+          it 'yields the value and a CAS to the block' do
+            memcached_persistent(p) do |dc|
+              dc.flush
+
+              expected = { 'blah' => 'blerg!' }
+
+              set_cas = dc.set('gets_key', expected)
+              get_block_called = false
+              block_value = SecureRandom.hex(4)
+              stored_value = stored_cas = nil
+
+              # Validate call-with-block on hit
+              res = dc.get_cas('gets_key') do |v, cas|
+                get_block_called = true
+                stored_value = v
+                stored_cas = cas
+                block_value
+              end
+
+              assert get_block_called
+              assert_equal expected, stored_value
+              assert valid_cas?(stored_cas)
+              assert_equal set_cas, stored_cas
+              assert_equal block_value, res
+            end
+          end
+
+          # This is historical, as the 'Not found' value was
+          # treated as a special case at one time
+          it 'allows "Not found" as value' do
+            memcached_persistent(p) do |dc|
+              dc.flush
+
+              expected = 'Not found'
+
+              set_cas = dc.set('gets_key', expected)
+              get_block_called = false
+              block_value = SecureRandom.hex(4)
+              stored_value = stored_cas = nil
+
+              # Validate call-with-block on hit
+              res = dc.get_cas('gets_key') do |v, cas|
+                get_block_called = true
+                stored_value = v
+                stored_cas = cas
+                block_value
+              end
+
+              assert get_block_called
+              assert_equal expected, stored_value
+              assert valid_cas?(stored_cas)
+              assert_equal set_cas, stored_cas
+              assert_equal block_value, res
+            end
+          end
+
+          it 'yields [nil, 0] to the block on a miss' do
+            memcached_persistent(p) do |dc|
+              dc.flush
+
+              get_block_called = false
+              block_value = SecureRandom.hex(4)
+              stored_value = stored_cas = nil
+              # Validate call-with-block on miss
+              res = dc.get_cas('gets_key') do |v, cas|
+                get_block_called = true
+                stored_value = v
+                stored_cas = cas
+                block_value
+              end
+
+              assert get_block_called
+              assert_nil stored_value
+              assert_equal 0, stored_cas
+              assert_equal block_value, res
+            end
+          end
+        end
+      end
+
+      it 'supports multi-get with CAS' do
+        memcached_persistent(p) do |dc|
+          dc.close
+          dc.flush
+
+          expected_hash = { 'a' => 'foo', 'b' => 123 }
+          expected_hash.each_pair do |k, v|
+            dc.set(k, v)
+          end
+
+          # Invocation without block
+          resp = dc.get_multi_cas(%w[a b c d e f])
+          resp.each_pair do |k, data|
+            value = data.first
+            cas = data[1]
+
+            assert_equal expected_hash[k], value
+            assert(cas && cas != 0)
+          end
+
+          # Invocation with block
+          dc.get_multi_cas(%w[a b c d e f]) do |k, data|
+            value = data.first
+            cas = data[1]
+
+            assert_equal expected_hash[k], value
+            assert(cas && cas != 0)
+          end
+        end
+      end
+
+      it 'supports replace-with-CAS operation' do
+        memcached_persistent(p) do |dc|
+          dc.flush
+          cas = dc.set('key', 'value')
+
+          # Accepts CAS, replaces, and returns new CAS
+          cas = dc.replace_cas('key', 'value2', cas)
+
+          assert_kind_of Integer, cas
+
+          assert_equal 'value2', dc.get('key')
+        end
+      end
+
+      # There's a bug in some versions of memcached where
+      # the meta delete doesn't honor the CAS argument
+      # Ensure our tests run correctly when used with
+      # either set of versions
+      if MemcachedManager.supports_delete_cas?(p)
+        it 'supports delete with CAS' do
+          memcached_persistent(p) do |dc|
+            cas = dc.set('some_key', 'some_value')
+
+            # It returns falsey and doesn't delete
+            # when the CAS is wrong
+            refute dc.delete_cas('some_key', 123)
+            assert_equal 'some_value', dc.get('some_key')
+
+            dc.delete_cas('some_key', cas)
+
+            assert_nil dc.get('some_key')
+
+            refute dc.delete_cas('nonexist', 123)
+          end
+        end
+
+        it 'handles CAS round-trip operations' do
+          memcached_persistent(p) do |dc|
+            dc.flush
+
+            expected = { 'blah' => 'blerg!' }
+            dc.set('some_key', expected)
+
+            value, cas = dc.get_cas('some_key')
+
+            assert_equal value, expected
+            assert(!cas.nil? && cas != 0)
+
+            # Set operation, first with wrong then with correct CAS
+            expected = { 'blah' => 'set succeeded' }
+
+            refute(dc.set_cas('some_key', expected, cas + 1))
+            assert op_addset_succeeds(cas = dc.set_cas('some_key', expected, cas))
+
+            # Replace operation, first with wrong then with correct CAS
+            expected = { 'blah' => 'replace succeeded' }
+
+            refute(dc.replace_cas('some_key', expected, cas + 1))
+            assert op_addset_succeeds(cas = dc.replace_cas('some_key', expected, cas))
+
+            # Delete operation, first with wrong then with correct CAS
+            refute(dc.delete_cas('some_key', cas + 1))
+            assert dc.delete_cas('some_key', cas)
+          end
+        end
+      end
+
+      describe 'cas' do
+        it 'does not call the block when the key has no existing value' do
+          memcached_persistent(p) do |dc|
+            dc.flush
+
+            resp = dc.cas('cas_key') do |_value|
+              raise('Value does not exist')
+            end
+
+            assert_nil resp
+            assert_nil dc.cas('cas_key')
+          end
+        end
+
+        it 'calls the block and sets a new value when the key has an existing value' do
+          memcached_persistent(p) do |dc|
+            dc.flush
+
+            expected = { 'blah' => 'blerg!' }
+            dc.set('cas_key', expected)
+
+            mutated = { 'blah' => 'foo!' }
+            resp = dc.cas('cas_key') do |value|
+              assert_equal expected, value
+              mutated
+            end
+
+            assert op_cas_succeeds(resp)
+
+            resp = dc.get('cas_key')
+
+            assert_equal mutated, resp
+          end
+        end
+
+        it "calls the block and sets a new value when the key has the value 'Not found'" do
+          memcached_persistent(p) do |dc|
+            dc.flush
+
+            expected = 'Not found'
+            dc.set('cas_key', expected)
+
+            mutated = { 'blah' => 'foo!' }
+            resp = dc.cas('cas_key') do |value|
+              assert_equal expected, value
+              mutated
+            end
+
+            assert op_cas_succeeds(resp)
+
+            resp = dc.get('cas_key')
+
+            assert_equal mutated, resp
+          end
+        end
+      end
+
+      describe 'cas!' do
+        it 'calls the block and sets a new value when the key has no existing value' do
+          memcached_persistent(p) do |dc|
+            dc.flush
+
+            mutated = { 'blah' => 'foo!' }
+            resp = dc.cas!('cas_key') do |value|
+              assert_nil value
+              mutated
+            end
+
+            assert op_cas_succeeds(resp)
+
+            resp = dc.get('cas_key')
+
+            assert_equal mutated, resp
+          end
+        end
+
+        it 'calls the block and sets a new value when the key has an existing value' do
+          memcached_persistent(p) do |dc|
+            dc.flush
+
+            expected = { 'blah' => 'blerg!' }
+            dc.set('cas_key', expected)
+
+            mutated = { 'blah' => 'foo!' }
+            resp = dc.cas!('cas_key') do |value|
+              assert_equal expected, value
+              mutated
+            end
+
+            assert op_cas_succeeds(resp)
+
+            resp = dc.get('cas_key')
+
+            assert_equal mutated, resp
+          end
+        end
+
+        it "calls the block and sets a new value when the key has the value 'Not found'" do
+          memcached_persistent(p) do |dc|
+            dc.flush
+
+            expected = 'Not found'
+            dc.set('cas_key', expected)
+
+            mutated = { 'blah' => 'foo!' }
+            resp = dc.cas!('cas_key') do |value|
+              assert_equal expected, value
+              mutated
+            end
+
+            assert op_cas_succeeds(resp)
+
+            resp = dc.get('cas_key')
+
+            assert_equal mutated, resp
+          end
+        end
+      end
+    end
+  end
+end
diff --git a/test/integration/test_compressor.rb b/test/integration/test_compressor.rb
new file mode 100644
index 0000000..26cb378
--- /dev/null
+++ b/test/integration/test_compressor.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require_relative '../helper'
+require 'json'
+
+class NoopCompressor
+  def self.compress(data)
+    data
+  end
+
+  def self.decompress(data)
+    data
+  end
+end
+
+describe 'Compressor' do
+  MemcachedManager.supported_protocols.each do |p|
+    describe "using the #{p} protocol" do
+      it 'defaults to Dalli::Compressor' do
+        memcached(p, 29_199) do |dc|
+          dc.set 1, 2
+
+          assert_equal Dalli::Compressor, dc.instance_variable_get(:@ring).servers.first.compressor
+        end
+      end
+
+      it 'supports a custom compressor' do
+        memcached(p, 29_199) do |_dc|
+          memcache = Dalli::Client.new('127.0.0.1:29199', { compressor: NoopCompressor })
+          memcache.set 1, 2
+          begin
+            assert_equal NoopCompressor,
+                         memcache.instance_variable_get(:@ring).servers.first.compressor
+
+            memcached(p, 19_127) do |newdc|
+              assert newdc.set('string-test', 'a test string')
+              assert_equal('a test string', newdc.get('string-test'))
+            end
+          end
+        end
+      end
+
+      describe 'GzipCompressor' do
+        it 'compresses and uncompresses data using Zlib::GzipWriter/Reader' do
+          memcached(p, 19_127) do |_dc|
+            memcache = Dalli::Client.new('127.0.0.1:19127', { compress: true, compressor: Dalli::GzipCompressor })
+            data = (0...1025).map { rand(65..90).chr }.join
+
+            assert memcache.set('test', data)
+            assert_equal(data, memcache.get('test'))
+            assert_equal Dalli::GzipCompressor, memcache.instance_variable_get(:@ring).servers.first.compressor
+          end
+        end
+      end
+    end
+  end
+end
diff --git a/test/integration/test_concurrency.rb b/test/integration/test_concurrency.rb
new file mode 100644
index 0000000..f7e0de9
--- /dev/null
+++ b/test/integration/test_concurrency.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require_relative '../helper'
+
+describe 'concurrent behavior' do
+  MemcachedManager.supported_protocols.each do |p|
+    describe "using the #{p} protocol" do
+      it 'supports multithreaded access' do
+        memcached_persistent(p) do |cache|
+          cache.flush
+          workers = []
+
+          cache.set('f', 'zzz')
+
+          assert op_cas_succeeds((cache.cas('f') do |value|
+            value << 'z'
+          end))
+          assert_equal 'zzzz', cache.get('f')
+
+          # Have a bunch of threads perform a bunch of operations at the same time.
+          # Verify the result of each operation to ensure the request and response
+          # are not intermingled between threads.
+          10.times do
+            workers << Thread.new do
+              100.times do
+                cache.set('a', 9)
+                cache.set('b', 11)
+                cache.incr('cat', 10, 0, 10)
+                cache.set('f', 'zzz')
+                res = cache.cas('f') do |value|
+                  value << 'z'
+                end
+
+                refute_nil res
+                refute cache.add('a', 11)
+                assert_equal({ 'a' => 9, 'b' => 11 }, cache.get_multi(%w[a b]))
+                inc = cache.incr('cat', 10)
+
+                assert_equal 0, inc % 5
+                cache.decr('cat', 5)
+
+                assert_equal 11, cache.get('b')
+
+                assert_equal %w[a b], cache.get_multi('a', 'b', 'c').keys.sort
+              end
+            end
+          end
+
+          workers.each(&:join)
+          cache.flush
+        end
+      end
+    end
+  end
+end
diff --git a/test/integration/test_connection_pool.rb b/test/integration/test_connection_pool.rb
new file mode 100644
index 0000000..605994f
--- /dev/null
+++ b/test/integration/test_connection_pool.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require_relative '../helper'
+
+describe 'connection pool behavior' do
+  MemcachedManager.supported_protocols.each do |p|
+    describe "using the #{p} protocol" do
+      it 'can masquerade as a connection pool using the with method' do
+        memcached_persistent(p) do |dc|
+          dc.with { |c| c.set('some_key', 'some_value') }
+
+          assert_equal 'some_value', dc.get('some_key')
+
+          dc.with { |c| c.delete('some_key') }
+
+          assert_nil dc.get('some_key')
+        end
+      end
+    end
+  end
+end
diff --git a/test/integration/test_encoding.rb b/test/integration/test_encoding.rb
new file mode 100644
index 0000000..381b717
--- /dev/null
+++ b/test/integration/test_encoding.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require_relative '../helper'
+
+describe 'Encoding' do
+  MemcachedManager.supported_protocols.each do |p|
+    describe "using the #{p} protocol" do
+      it 'supports Unicode values' do
+        memcached_persistent(p) do |dc|
+          key = 'foo'
+          utf8 = 'ƒ©åÍÎ'
+
+          assert dc.set(key, utf8)
+          assert_equal utf8, dc.get(key)
+        end
+      end
+
+      it 'supports Unicode keys' do
+        memcached_persistent(p) do |dc|
+          utf_key = utf8 = 'ƒ©åÍÎ'
+
+          dc.set(utf_key, utf8)
+
+          assert_equal utf8, dc.get(utf_key)
+        end
+      end
+    end
+  end
+end
diff --git a/test/integration/test_failover.rb b/test/integration/test_failover.rb
new file mode 100644
index 0000000..85de7e7
--- /dev/null
+++ b/test/integration/test_failover.rb
@@ -0,0 +1,179 @@
+# frozen_string_literal: true
+
+require_relative '../helper'
+
+describe 'failover' do
+  MemcachedManager.supported_protocols.each do |p|
+    describe "using the #{p} protocol" do
+      # Timeouts on JRuby work differently and aren't firing, meaning we're
+      # not testing the condition
+      unless defined? JRUBY_VERSION
+        describe 'timeouts' do
+          it 'does not lead to corrupt sockets' do
+            memcached_persistent(p) do |dc|
+              value = { test: '123' }
+              begin
+                Timeout.timeout 0.01 do
+                  start_time = Time.now
+                  10_000.times do
+                    dc.set('test_123', value)
+                  end
+
+                  flunk("Did not timeout in #{Time.now - start_time}")
+                end
+              rescue Timeout::Error
+                # Ignore expected timeout
+              end
+
+              assert_equal(value, dc.get('test_123'))
+            end
+          end
+        end
+      end
+
+      describe 'assuming some bad servers' do
+        it 'silently reconnects if the server hiccups' do
+          server_port = 30_124
+          memcached_persistent(p, server_port) do |dc, port|
+            dc.set 'foo', 'bar'
+            foo = dc.get 'foo'
+
+            assert_equal('bar', foo)
+
+            memcached_kill(port)
+            memcached_persistent(p, port) do
+              foo = dc.get 'foo'
+
+              assert_nil foo
+
+              memcached_kill(port)
+            end
+          end
+        end
+
+        it 'reconnects if server idles the connection' do
+          port1 = 32_112
+          port2 = 37_887
+
+          memcached(p, port1, '-o idle_timeout=1') do |_, first_port|
+            memcached(p, port2, '-o idle_timeout=1') do |_, second_port|
+              dc = Dalli::Client.new ["localhost:#{first_port}", "localhost:#{second_port}"]
+              dc.set 'foo', 'bar'
+              dc.set 'foo2', 'bar2'
+              foo = dc.get_multi 'foo', 'foo2'
+
+              assert_equal({ 'foo' => 'bar', 'foo2' => 'bar2' }, foo)
+
+              # wait for socket to expire and get cleaned up
+              sleep 5
+
+              foo = dc.get_multi 'foo', 'foo2'
+
+              assert_equal({ 'foo' => 'bar', 'foo2' => 'bar2' }, foo)
+            end
+          end
+        end
+
+        it 'handles graceful failover' do
+          port1 = 31_777
+          port2 = 32_113
+          memcached_persistent(p, port1) do |_first_dc, first_port|
+            memcached_persistent(p, port2) do |_second_dc, second_port|
+              dc = Dalli::Client.new ["localhost:#{first_port}", "localhost:#{second_port}"]
+              dc.set 'foo', 'bar'
+              foo = dc.get 'foo'
+
+              assert_equal('bar', foo)
+
+              memcached_kill(first_port)
+
+              dc.set 'foo', 'bar'
+              foo = dc.get 'foo'
+
+              assert_equal('bar', foo)
+
+              memcached_kill(second_port)
+
+              assert_raises Dalli::RingError, message: 'No server available' do
+                dc.set 'foo', 'bar'
+              end
+            end
+          end
+        end
+
+        it 'handles them gracefully in get_multi' do
+          port1 = 32_971
+          port2 = 34_312
+          memcached_persistent(p, port1) do |_first_dc, first_port|
+            memcached(p, port2) do |_second_dc, second_port|
+              dc = Dalli::Client.new ["localhost:#{first_port}", "localhost:#{second_port}"]
+              dc.set 'a', 'a1'
+              result = dc.get_multi ['a']
+
+              assert_equal({ 'a' => 'a1' }, result)
+
+              memcached_kill(first_port)
+
+              result = dc.get_multi ['a']
+
+              assert_equal({ 'a' => 'a1' }, result)
+            end
+          end
+        end
+
+        it 'handles graceful failover in get_multi' do
+          port1 = 34_541
+          port2 = 33_044
+          memcached_persistent(p, port1) do |_first_dc, first_port|
+            memcached_persistent(p, port2) do |_second_dc, second_port|
+              dc = Dalli::Client.new ["localhost:#{first_port}", "localhost:#{second_port}"]
+              dc.set 'foo', 'foo1'
+              dc.set 'bar', 'bar1'
+              result = dc.get_multi %w[foo bar]
+
+              assert_equal({ 'foo' => 'foo1', 'bar' => 'bar1' }, result)
+
+              memcached_kill(first_port)
+
+              dc.set 'foo', 'foo1'
+              dc.set 'bar', 'bar1'
+              result = dc.get_multi %w[foo bar]
+
+              assert_equal({ 'foo' => 'foo1', 'bar' => 'bar1' }, result)
+
+              memcached_kill(second_port)
+
+              result = dc.get_multi %w[foo bar]
+
+              assert_empty(result)
+            end
+          end
+        end
+
+        it 'still reports stats properly when a server is down' do
+          port1 = 34_547
+          port2 = 33_219
+          memcached_persistent(p, port1) do |_first_dc, first_port|
+            memcached_persistent(p, port2) do |_second_dc, second_port|
+              dc = Dalli::Client.new ["localhost:#{first_port}", "localhost:#{second_port}"]
+              result = dc.stats
+
+              assert_instance_of Hash, result["localhost:#{first_port}"]
+              assert_instance_of Hash, result["localhost:#{second_port}"]
+
+              memcached_kill(first_port)
+
+              dc = Dalli::Client.new ["localhost:#{first_port}", "localhost:#{second_port}"]
+              result = dc.stats
+
+              assert_instance_of NilClass, result["localhost:#{first_port}"]
+              assert_instance_of Hash, result["localhost:#{second_port}"]
+
+              memcached_kill(second_port)
+            end
+          end
+        end
+      end
+    end
+  end
+end
diff --git a/test/integration/test_marshal.rb b/test/integration/test_marshal.rb
new file mode 100644
index 0000000..ed32e8a
--- /dev/null
+++ b/test/integration/test_marshal.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require_relative '../helper'
+require 'json'
+
+describe 'Marshal and value size behavior' do
+  MemcachedManager.supported_protocols.each do |p|
+    describe "using the #{p} protocol" do
+      it 'does not allow values over the 1MB limit' do
+        memcached_persistent(p) do |dc|
+          value = SecureRandom.random_bytes((1024 * 1024) + 30_000)
+
+          with_nil_logger do
+            assert_raises Dalli::ValueOverMaxSize do
+              dc.set('verylarge', value)
+            end
+          end
+        end
+      end
+
+      it 'allows large values under the limit to be set' do
+        memcached_persistent(p) do |dc|
+          value = '0' * 1024 * 1024
+
+          assert dc.set('verylarge', value, nil, compress: true)
+        end
+      end
+
+      it 'errors appropriately when the value cannot be marshalled' do
+        memcached_persistent(p) do |dc|
+          with_nil_logger do
+            assert_raises Dalli::MarshalError do
+              dc.set('a', proc { true })
+            end
+          end
+        end
+      end
+    end
+  end
+end
diff --git a/test/integration/test_memcached_admin.rb b/test/integration/test_memcached_admin.rb
new file mode 100644
index 0000000..73e9057
--- /dev/null
+++ b/test/integration/test_memcached_admin.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require_relative '../helper'
+require 'json'
+
+describe 'memcached admin commands' do
+  MemcachedManager.supported_protocols.each do |p|
+    describe "using the #{p} protocol" do
+      describe 'stats' do
+        it 'supports stats' do
+          memcached_persistent(p) do |dc|
+            # make sure that get_hits is not 0
+            dc.set(:a, '1234567890' * 100_000)
+            dc.get(:a)
+
+            stats = dc.stats
+            servers = stats.keys
+
+            assert(servers.any? do |s|
+              stats[s]['get_hits'].to_i != 0
+            end, 'general stats failed')
+
+            stats_items = dc.stats(:items)
+            servers = stats_items.keys
+
+            assert(servers.all? do |s|
+              stats_items[s].keys.any? do |key|
+                key =~ /items:[0-9]+:number/
+              end
+            end, 'stats items failed')
+
+            stats_slabs = dc.stats(:slabs)
+            servers = stats_slabs.keys
+
+            assert(servers.all? do |s|
+              stats_slabs[s].keys.any?('active_slabs')
+            end, 'stats slabs failed')
+
+            # reset_stats test
+            results = dc.reset_stats
+
+            assert(results.all? { |x| x })
+            stats = dc.stats
+            servers = stats.keys
+
+            # check if reset was performed
+            servers.each do |s|
+              assert_equal 0, dc.stats[s]['get_hits'].to_i
+            end
+          end
+        end
+      end
+
+      describe 'version' do
+        it 'supports the version operation' do
+          memcached_persistent(p) do |dc|
+            v = dc.version
+            servers = v.keys
+
+            assert(servers.any? do |s|
+              !v[s].nil?
+            end, 'version failed')
+          end
+        end
+      end
+    end
+  end
+end
diff --git a/test/integration/test_namespace_and_key.rb b/test/integration/test_namespace_and_key.rb
new file mode 100644
index 0000000..bfb3f81
--- /dev/null
+++ b/test/integration/test_namespace_and_key.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+require_relative '../helper'
+
+describe 'Namespace and key behavior' do
+  MemcachedManager.supported_protocols.each do |p|
+    describe "using the #{p} protocol" do
+      it 'handles namespaced keys' do
+        memcached_persistent(p) do |_, port|
+          dc = Dalli::Client.new("localhost:#{port}", namespace: 'a')
+          dc.set('namespaced', 1)
+          dc2 = Dalli::Client.new("localhost:#{port}", namespace: 'b')
+          dc2.set('namespaced', 2)
+
+          assert_equal 1, dc.get('namespaced')
+          assert_equal 2, dc2.get('namespaced')
+        end
+      end
+
+      it 'handles a nil namespace' do
+        memcached_persistent(p) do |_, port|
+          dc = Dalli::Client.new("localhost:#{port}", namespace: nil)
+          dc.set('key', 1)
+
+          assert_equal 1, dc.get('key')
+        end
+      end
+
+      it 'truncates cache keys that are too long' do
+        memcached_persistent(p) do |_, port|
+          dc = Dalli::Client.new("localhost:#{port}", namespace: 'some:namspace')
+          key = 'this-cache-key-is-far-too-long-so-it-must-be-hashed-and-truncated-and-stuff' * 10
+          value = 'some value'
+
+          assert op_addset_succeeds(dc.set(key, value))
+          assert_equal value, dc.get(key)
+        end
+      end
+
+      it 'handles namespaced keys in get_multi' do
+        memcached_persistent(p) do |_, port|
+          dc = Dalli::Client.new("localhost:#{port}", namespace: 'a')
+          dc.set('a', 1)
+          dc.set('b', 2)
+
+          assert_equal({ 'a' => 1, 'b' => 2 }, dc.get_multi('a', 'b'))
+        end
+      end
+
+      it 'handles special Regexp characters in namespace with get_multi' do
+        memcached_persistent(p) do |_, port|
+          # /(?!)/ can never match anything, so it safely exercises Regexp special characters
+          dc = Dalli::Client.new("localhost:#{port}", namespace: '(?!)')
+          dc.set('a', 1)
+          dc.set('b', 2)
+
+          assert_equal({ 'a' => 1, 'b' => 2 }, dc.get_multi('a', 'b'))
+        end
+      end
+
+      it 'allows whitespace characters in keys' do
+        memcached_persistent(p) do |dc|
+          dc.set "\t", 1
+
+          assert_equal 1, dc.get("\t")
+          dc.set "\n", 1
+
+          assert_equal 1, dc.get("\n")
+          dc.set '   ', 1
+
+          assert_equal 1, dc.get('   ')
+        end
+      end
+
+      it 'does not allow blanks for keys' do
+        memcached_persistent(p) do |dc|
+          assert_raises ArgumentError do
+            dc.set '', 1
+          end
+          assert_raises ArgumentError do
+            dc.set nil, 1
+          end
+        end
+      end
+
+      it 'allows the namespace to be a symbol' do
+        memcached_persistent(p) do |_, port|
+          dc = Dalli::Client.new("localhost:#{port}", namespace: :wunderschoen)
+          dc.set 'x' * 251, 1
+
+          assert_equal(1, dc.get(('x' * 251).to_s))
+        end
+      end
+    end
+  end
+end
diff --git a/test/integration/test_network.rb b/test/integration/test_network.rb
new file mode 100644
index 0000000..cac8a87
--- /dev/null
+++ b/test/integration/test_network.rb
@@ -0,0 +1,364 @@
+# frozen_string_literal: true
+
+require_relative '../helper'
+
+describe 'Network' do
+  MemcachedManager.supported_protocols.each do |p|
+    describe "using the #{p} protocol" do
+      describe 'assuming a bad network' do
+        it 'handles no server being available' do
+          dc = Dalli::Client.new 'localhost:19333'
+          assert_raises Dalli::RingError, message: 'No server available' do
+            dc.get 'foo'
+          end
+        end
+
+        describe 'with a fake server' do
+          it 'handles connection reset' do
+            memcached_mock(->(sock) { sock.close }) do
+              dc = Dalli::Client.new('localhost:19123')
+              assert_raises Dalli::RingError, message: 'No server available' do
+                dc.get('abc')
+              end
+            end
+          end
+
+          it 'handles connection reset with a unix socket' do
+            socket_path = MemcachedMock::UNIX_SOCKET_PATH
+            memcached_mock(->(sock) { sock.close }, :start_unix, socket_path) do
+              dc = Dalli::Client.new(socket_path)
+              assert_raises Dalli::RingError, message: 'No server available' do
+                dc.get('abc')
+              end
+            end
+          end
+
+          it 'handles a malformed response' do
+            memcached_mock(->(sock) { sock.write('123') }) do
+              dc = Dalli::Client.new('localhost:19123')
+              assert_raises Dalli::RingError, message: 'No server available' do
+                dc.get('abc')
+              end
+            end
+          end
+
+          it 'handles socket timeouts' do
+            dc = Dalli::Client.new('localhost:19123', socket_timeout: 0)
+            assert_raises Dalli::RingError, message: 'No server available' do
+              dc.get('abc')
+            end
+          end
+
+          it 'handles connect timeouts' do
+            memcached_mock(lambda { |sock|
+                             sleep(0.6)
+                             sock.close
+                           }, :delayed_start) do
+              dc = Dalli::Client.new('localhost:19123')
+              assert_raises Dalli::RingError, message: 'No server available' do
+                dc.get('abc')
+              end
+            end
+          end
+
+          it 'handles read timeouts' do
+            memcached_mock(lambda { |sock|
+                             sleep(0.6)
+                             sock.write('giraffe')
+                           }) do
+              dc = Dalli::Client.new('localhost:19123')
+              assert_raises Dalli::RingError, message: 'No server available' do
+                dc.get('abc')
+              end
+            end
+          end
+        end
+
+        it 'opens a standard TCP connection when ssl_context is not configured' do
+          memcached_persistent(p) do |dc|
+            server = dc.send(:ring).servers.first
+            sock = Dalli::Socket::TCP.open(server.hostname, server.port, server.options)
+
+            assert_instance_of Dalli::Socket::TCP, sock
+
+            dc.set('abc', 123)
+
+            assert_equal(123, dc.get('abc'))
+          end
+        end
+
+        it 'opens an SSL TCP connection when there is an SSL context set' do
+          memcached_ssl_persistent(p) do |dc|
+            server = dc.send(:ring).servers.first
+            sock = Dalli::Socket::TCP.open(server.hostname, server.port, server.options)
+
+            assert_instance_of Dalli::Socket::SSLSocket, sock
+
+            dc.set('abc', 123)
+
+            assert_equal(123, dc.get('abc'))
+
+            # Confirm that pipelined get works, since this depends on attributes on
+            # the socket
+            assert_equal({ 'abc' => 123 }, dc.get_multi(['abc']))
+          end
+        end
+
+        it 'allows TCP connections to be configured for keepalive' do
+          memcached_persistent(p) do |_, port|
+            dc = Dalli::Client.new("localhost:#{port}", keepalive: true)
+            dc.set(:a, 1)
+            ring = dc.send(:ring)
+            server = ring.servers.first
+            socket = server.sock
+
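+            # a non-zero SO_KEEPALIVE option value indicates keepalive was
+            # actually enabled on the underlying socket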
+            optval = socket.getsockopt(Socket::SOL_SOCKET, Socket::SO_KEEPALIVE)
+            optval = optval.unpack 'i'
+
+            refute_equal(optval[0], 0)
+          end
+        end
+      end
+
+      it 'handles timeout error during pipelined get' do
+        with_nil_logger do
+          memcached(p, 19_191) do |dc|
+            dc.send(:ring).server_for_key('abc').sock.stub(:write, proc { raise Timeout::Error }) do
+              assert_empty dc.get_multi(['abc'])
+            end
+          end
+        end
+      end
+
+      it 'handles asynchronous Thread#raise' do
+        with_nil_logger do
+          memcached(p, 19_191) do |dc|
+            10.times do |i|
+              thread = Thread.new do
+                loop do
+                  assert_instance_of Integer, dc.set("key:#{i}", i.to_s)
+                end
+              rescue RuntimeError
+                nil # expected
+              end
+              thread.join(rand(0.01..0.2))
+
+              thread.raise('Test Timeout Error')
+              joined_thread = thread.join(1)
+
+              refute_nil joined_thread
+              refute_predicate joined_thread, :alive?
+              assert_equal i.to_s, dc.get("key:#{i}")
+            end
+          end
+        end
+      end
+
+      it 'handles asynchronous Thread#raise during pipelined get' do
+        with_nil_logger do
+          memcached(p, 19_191) do |dc|
+            10.times do |i|
+              expected_response = 100.times.to_h { |x| ["key:#{i}:#{x}", x.to_s] }
+              expected_response.each do |key, val|
+                dc.set(key, val)
+              end
+
+              thread = Thread.new do
+                loop do
+                  assert_equal expected_response, dc.get_multi(expected_response.keys)
+                end
+              rescue RuntimeError
+                nil # expected
+              end
+              thread.join(rand(0.01..0.2))
+
+              thread.raise('Test Timeout Error')
+              joined_thread = thread.join(1)
+
+              refute_nil joined_thread
+              refute_predicate joined_thread, :alive?
+              assert_equal expected_response, dc.get_multi(expected_response.keys)
+            end
+          end
+        end
+      end
+
+      it 'handles asynchronous Thread#kill' do
+        with_nil_logger do
+          memcached(p, 19_191) do |dc|
+            10.times do |i|
+              thread = Thread.new do
+                loop do
+                  assert_instance_of Integer, dc.set("key:#{i}", i.to_s)
+                end
+              rescue RuntimeError
+                nil # expected
+              end
+              thread.join(rand(0.01..0.2))
+
+              thread.kill
+              joined_thread = thread.join(1)
+
+              refute_nil joined_thread
+              refute_predicate joined_thread, :alive?
+              assert_equal i.to_s, dc.get("key:#{i}")
+            end
+          end
+        end
+      end
+
+      it 'handles asynchronous Thread#kill during pipelined get' do
+        with_nil_logger do
+          memcached(p, 19_191) do |dc|
+            10.times do |i|
+              expected_response = 100.times.to_h { |x| ["key:#{i}:#{x}", x.to_s] }
+              expected_response.each do |key, val|
+                dc.set(key, val)
+              end
+
+              thread = Thread.new do
+                loop do
+                  assert_equal expected_response, dc.get_multi(expected_response.keys)
+                end
+              rescue RuntimeError
+                nil # expected
+              end
+              thread.join(rand(0.01..0.2))
+
+              thread.kill
+              joined_thread = thread.join(1)
+
+              refute_nil joined_thread
+              refute_predicate joined_thread, :alive?
+              assert_equal expected_response, dc.get_multi(expected_response.keys)
+            end
+          end
+        end
+      end
+
+      it 'passes a simple smoke test on a TCP socket' do
+        memcached_persistent(p) do |dc, port|
+          resp = dc.flush
+
+          refute_nil resp
+          assert_equal [true, true], resp
+
+          assert op_addset_succeeds(dc.set(:foo, 'bar'))
+          assert_equal 'bar', dc.get(:foo)
+
+          resp = dc.get('123')
+
+          assert_nil resp
+
+          assert op_addset_succeeds(dc.set('123', 'xyz'))
+
+          resp = dc.get('123')
+
+          assert_equal 'xyz', resp
+
+          assert op_addset_succeeds(dc.set('123', 'abc'))
+
+          dc.prepend('123', '0')
+          dc.append('123', '0')
+
+          assert_raises Dalli::UnmarshalError do
+            resp = dc.get('123')
+          end
+
+          dc.close
+          dc = nil
+
+          dc = Dalli::Client.new("localhost:#{port}", digest_class: OpenSSL::Digest::SHA1)
+
+          assert op_addset_succeeds(dc.set('456', 'xyz', 0, raw: true))
+
+          resp = dc.prepend '456', '0'
+
+          assert resp
+
+          resp = dc.append '456', '9'
+
+          assert resp
+
+          resp = dc.get('456', raw: true)
+
+          assert_equal '0xyz9', resp
+
+          assert op_addset_succeeds(dc.set('456', false))
+
+          resp = dc.get('456')
+
+          refute resp
+
+          resp = dc.stats
+
+          assert_instance_of Hash, resp
+
+          dc.close
+        end
+      end
+
+      it 'passes a simple smoke test on a unix socket' do
+        memcached_persistent(:binary, MemcachedMock::UNIX_SOCKET_PATH) do |dc, path|
+          resp = dc.flush
+
+          refute_nil resp
+          assert_equal [true], resp
+
+          assert op_addset_succeeds(dc.set(:foo, 'bar'))
+          assert_equal 'bar', dc.get(:foo)
+
+          resp = dc.get('123')
+
+          assert_nil resp
+
+          assert op_addset_succeeds(dc.set('123', 'xyz'))
+
+          resp = dc.get('123')
+
+          assert_equal 'xyz', resp
+
+          assert op_addset_succeeds(dc.set('123', 'abc'))
+
+          dc.prepend('123', '0')
+          dc.append('123', '0')
+
+          assert_raises Dalli::UnmarshalError do
+            resp = dc.get('123')
+          end
+
+          dc.close
+          dc = nil
+
+          dc = Dalli::Client.new(path)
+
+          assert op_addset_succeeds(dc.set('456', 'xyz', 0, raw: true))
+
+          resp = dc.prepend '456', '0'
+
+          assert resp
+
+          resp = dc.append '456', '9'
+
+          assert resp
+
+          resp = dc.get('456', raw: true)
+
+          assert_equal '0xyz9', resp
+
+          assert op_addset_succeeds(dc.set('456', false))
+
+          resp = dc.get('456')
+
+          refute resp
+
+          resp = dc.stats
+
+          assert_instance_of Hash, resp
+
+          dc.close
+        end
+      end
+    end
+  end
+end
diff --git a/test/integration/test_operations.rb b/test/integration/test_operations.rb
new file mode 100644
index 0000000..f47962b
--- /dev/null
+++ b/test/integration/test_operations.rb
@@ -0,0 +1,379 @@
+# frozen_string_literal: true
+
+require_relative '../helper'
+require 'openssl'
+require 'securerandom'
+
+describe 'operations' do
+  MemcachedManager.supported_protocols.each do |p|
+    describe "using the #{p} protocol" do
+      describe 'get' do
+        it 'returns the value on a hit' do
+          memcached_persistent(p) do |dc|
+            dc.flush
+
+            val1 = '1234567890' * 999_999
+            dc.set('a', val1)
+            val2 = dc.get('a')
+
+            assert_equal val1, val2
+
+            assert op_addset_succeeds(dc.set('a', nil))
+            assert_nil dc.get('a')
+          end
+        end
+
+        it 'returns nil on a miss' do
+          memcached_persistent(p) do |dc|
+            assert_nil dc.get('notexist')
+          end
+        end
+
+        # This is historical, as the 'Not found' value was
+        # treated as a special case at one time
+        it 'allows "Not found" as value' do
+          memcached_persistent(p) do |dc|
+            dc.set('key1', 'Not found')
+
+            assert_equal 'Not found', dc.get('key1')
+          end
+        end
+      end
+
+      describe 'gat' do
+        it 'returns the value and touches on a hit' do
+          memcached_persistent(p) do |dc|
+            dc.flush
+            dc.set 'key', 'value'
+
+            assert_equal 'value', dc.gat('key', 10)
+            assert_equal 'value', dc.gat('key')
+          end
+        end
+
+        it 'returns nil on a miss' do
+          memcached_persistent(p) do |dc|
+            dc.flush
+
+            assert_nil dc.gat('notexist', 10)
+          end
+        end
+      end
+
+      describe 'touch' do
+        it 'returns true on a hit' do
+          memcached_persistent(p) do |dc|
+            dc.flush
+            dc.set 'key', 'value'
+
+            assert dc.touch('key', 10)
+            assert dc.touch('key')
+            assert_equal 'value', dc.get('key')
+            assert_nil dc.touch('notexist')
+          rescue Dalli::DalliError => e
+            # This happens when the memcached version is older than 1.4.8
+            assert_equal 'Response error 129: Unknown command', e.message
+          end
+        end
+
+        it 'returns nil on a miss' do
+          memcached_persistent(p) do |dc|
+            dc.flush
+
+            assert_nil dc.touch('notexist')
+          end
+        end
+      end
+
+      describe 'set' do
+        it 'returns a CAS when the key exists and updates the value' do
+          memcached_persistent(p) do |dc|
+            dc.flush
+            dc.set('key', 'value')
+
+            assert op_replace_succeeds(dc.set('key', 'value2'))
+
+            assert_equal 'value2', dc.get('key')
+          end
+        end
+
+        it 'returns a CAS when no pre-existing value exists' do
+          memcached_persistent(p) do |dc|
+            dc.flush
+
+            assert op_replace_succeeds(dc.set('key', 'value2'))
+            assert_equal 'value2', dc.get('key')
+          end
+        end
+      end
+
+      describe 'add' do
+        it 'returns false when replacing an existing value and does not update the value' do
+          memcached_persistent(p) do |dc|
+            dc.flush
+            dc.set('key', 'value')
+
+            refute dc.add('key', 'value')
+
+            assert_equal 'value', dc.get('key')
+          end
+        end
+
+        it 'returns a CAS when no pre-existing value exists' do
+          memcached_persistent(p) do |dc|
+            dc.flush
+
+            assert op_replace_succeeds(dc.add('key', 'value2'))
+          end
+        end
+      end
+
+      describe 'replace' do
+        it 'returns a CAS when the key exists and updates the value' do
+          memcached_persistent(p) do |dc|
+            dc.flush
+            dc.set('key', 'value')
+
+            assert op_replace_succeeds(dc.replace('key', 'value2'))
+
+            assert_equal 'value2', dc.get('key')
+          end
+        end
+
+        it 'returns false when no pre-existing value exists' do
+          memcached_persistent(p) do |dc|
+            dc.flush
+
+            refute dc.replace('key', 'value')
+          end
+        end
+      end
+
+      describe 'delete' do
+        it 'returns true on a hit and deletes the entry' do
+          memcached_persistent(p) do |dc|
+            dc.flush
+            dc.set('some_key', 'some_value')
+
+            assert_equal 'some_value', dc.get('some_key')
+
+            assert dc.delete('some_key')
+            assert_nil dc.get('some_key')
+
+            refute dc.delete('nonexist')
+          end
+        end
+
+        it 'returns false on a miss' do
+          memcached_persistent(p) do |dc|
+            dc.flush
+
+            refute dc.delete('nonexist')
+          end
+        end
+      end
+
+      describe 'fetch' do
+        it 'fetches pre-existing values' do
+          memcached_persistent(p) do |dc|
+            dc.flush
+            dc.set('fetch_key', 'Not found')
+            res = dc.fetch('fetch_key') { flunk 'fetch block called' }
+
+            assert_equal 'Not found', res
+          end
+        end
+
+        it 'supports fetch with default values' do
+          memcached_persistent(p) do |dc|
+            dc.flush
+
+            expected = { 'blah' => 'blerg!' }
+            executed = false
+            value = dc.fetch('fetch_key') do
+              executed = true
+              expected
+            end
+
+            assert_equal expected, value
+            assert executed
+
+            executed = false
+            value = dc.fetch('fetch_key') do
+              executed = true
+              expected
+            end
+
+            assert_equal expected, value
+            refute executed
+          end
+        end
+
+        it 'supports fetch with falsey values' do
+          memcached_persistent(p) do |dc|
+            dc.flush
+
+            dc.set('fetch_key', false)
+            res = dc.fetch('fetch_key') { flunk 'fetch block called' }
+
+            refute res
+          end
+        end
+
+        it 'supports fetch with nil values when cache_nils: true' do
+          memcached_persistent(p, 21_345, '', cache_nils: true) do |dc|
+            dc.flush
+
+            dc.set('fetch_key', nil)
+            res = dc.fetch('fetch_key') { flunk 'fetch block called' }
+
+            assert_nil res
+          end
+
+          memcached_persistent(p, 21_345, '', cache_nils: false) do |dc|
+            dc.flush
+            dc.set('fetch_key', nil)
+            executed = false
+            res = dc.fetch('fetch_key') do
+              executed = true
+              'bar'
+            end
+
+            assert_equal 'bar', res
+            assert executed
+          end
+        end
+      end
+
+      describe 'incr/decr' do
+        it 'supports incrementing and decrementing existing values' do
+          memcached_persistent(p) do |client|
+            client.flush
+
+            assert op_addset_succeeds(client.set('fakecounter', 0, 0, raw: true))
+            assert_equal 1, client.incr('fakecounter', 1)
+            assert_equal 2, client.incr('fakecounter', 1)
+            assert_equal 3, client.incr('fakecounter', 1)
+            assert_equal 1, client.decr('fakecounter', 2)
+            assert_equal '1', client.get('fakecounter')
+          end
+        end
+
+        it 'returns nil on a miss with no initial value' do
+          memcached_persistent(p) do |client|
+            client.flush
+
+            resp = client.incr('mycounter', 1)
+
+            assert_nil resp
+
+            resp = client.decr('mycounter', 1)
+
+            assert_nil resp
+          end
+        end
+
+        it 'enables setting an initial value with incr and subsequently incrementing/decrementing' do
+          memcached_persistent(p) do |client|
+            client.flush
+
+            resp = client.incr('mycounter', 1, 0, 2)
+
+            assert_equal 2, resp
+            resp = client.incr('mycounter', 1)
+
+            assert_equal 3, resp
+
+            resp = client.decr('mycounter', 2)
+
+            assert_equal 1, resp
+          end
+        end
+
+        it 'supports setting the initial value with decr and subsequently incrementing/decrementing' do
+          memcached_persistent(p) do |dc|
+            dc.flush
+
+            resp = dc.decr('counter', 100, 5, 0)
+
+            assert_equal 0, resp
+
+            resp = dc.decr('counter', 10)
+
+            assert_equal 0, resp
+
+            resp = dc.incr('counter', 10)
+
+            assert_equal 10, resp
+
+            current = 10
+            100.times do |x|
+              resp = dc.incr('counter', 10)
+
+              assert_equal current + ((x + 1) * 10), resp
+            end
+          end
+        end
+
+        it 'supports 64-bit values' do
+          memcached_persistent(p) do |dc|
+            dc.flush
+
+            resp = dc.decr('10billion', 0, 5, 10)
+
+            assert_equal 10, resp
+            # go over the 32-bit mark to verify proper (un)packing
+            resp = dc.incr('10billion', 10_000_000_000)
+
+            assert_equal 10_000_000_010, resp
+
+            resp = dc.decr('10billion', 1)
+
+            assert_equal 10_000_000_009, resp
+
+            resp = dc.decr('10billion', 0)
+
+            assert_equal 10_000_000_009, resp
+
+            resp = dc.incr('10billion', 0)
+
+            assert_equal 10_000_000_009, resp
+
+            resp = dc.decr('10billion', 9_999_999_999)
+
+            assert_equal 10, resp
+
+            resp = dc.incr('big', 100, 5, 0xFFFFFFFFFFFFFFFE)
+
+            assert_equal 0xFFFFFFFFFFFFFFFE, resp
+            resp = dc.incr('big', 1)
+
+            assert_equal 0xFFFFFFFFFFFFFFFF, resp
+
+            # roll over the 64-bit value; we'll get something undefined
+            resp = dc.incr('big', 1)
+
+            refute_equal 0x10000000000000000, resp
+            dc.reset
+          end
+        end
+      end
+
+      describe 'append/prepend' do
+        it 'supports the append and prepend operations' do
+          memcached_persistent(p) do |dc|
+            dc.flush
+
+            assert op_addset_succeeds(dc.set('456', 'xyz', 0, raw: true))
+            assert dc.prepend('456', '0')
+            assert dc.append('456', '9')
+            assert_equal '0xyz9', dc.get('456')
+
+            refute dc.append('nonexist', 'abc')
+            refute dc.prepend('nonexist', 'abc')
+          end
+        end
+      end
+    end
+  end
+end
diff --git a/test/integration/test_pipelined_get.rb b/test/integration/test_pipelined_get.rb
new file mode 100644
index 0000000..2f3f001
--- /dev/null
+++ b/test/integration/test_pipelined_get.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+require_relative '../helper'
+
+describe 'Pipelined Get' do
+  MemcachedManager.supported_protocols.each do |p|
+    describe "using the #{p} protocol" do
+      it 'supports pipelined get' do
+        memcached_persistent(p) do |dc|
+          dc.close
+          dc.flush
+          resp = dc.get_multi(%w[a b c d e f])
+
+          assert_empty(resp)
+
+          dc.set('a', 'foo')
+          dc.set('b', 123)
+          dc.set('c', %w[a b c])
+
+          # Invocation without block
+          resp = dc.get_multi(%w[a b c d e f])
+          expected_resp = { 'a' => 'foo', 'b' => 123, 'c' => %w[a b c] }
+
+          assert_equal(expected_resp, resp)
+
+          # Invocation with block
+          dc.get_multi(%w[a b c d e f]) do |k, v|
+            assert(expected_resp.key?(k) && expected_resp[k] == v)
+            expected_resp.delete(k)
+          end
+
+          assert_empty expected_resp
+
+          # Perform a big quiet set with 1000 elements.
+          arr = []
+          dc.multi do
+            1000.times do |idx|
+              dc.set idx, idx
+              arr << idx
+            end
+          end
+
+          # Retrieve the elements with a pipelined get
+          result = dc.get_multi(arr)
+
+          assert_equal(1000, result.size)
+          assert_equal(50, result['50'])
+        end
+      end
+
+      it 'supports pipelined get with keys containing Unicode or spaces' do
+        memcached_persistent(p) do |dc|
+          dc.close
+          dc.flush
+
+          keys_to_query = ['a', 'b', 'contains space', 'ƒ©åÍÎ']
+
+          resp = dc.get_multi(keys_to_query)
+
+          assert_empty(resp)
+
+          dc.set('a', 'foo')
+          dc.set('contains space', 123)
+          dc.set('ƒ©åÍÎ', %w[a b c])
+
+          # Invocation without block
+          resp = dc.get_multi(keys_to_query)
+          expected_resp = { 'a' => 'foo', 'contains space' => 123, 'ƒ©åÍÎ' => %w[a b c] }
+
+          assert_equal(expected_resp, resp)
+
+          # Invocation with block
+          dc.get_multi(keys_to_query) do |k, v|
+            assert(expected_resp.key?(k) && expected_resp[k] == v)
+            expected_resp.delete(k)
+          end
+
+          assert_empty expected_resp
+        end
+      end
+
+      describe 'pipeline_next_responses' do
+        it 'raises NetworkError when called before pipeline_response_setup' do
+          memcached_persistent(p) do |dc|
+            server = dc.send(:ring).servers.first
+            server.request(:pipelined_get, %w[a b])
+            assert_raises Dalli::NetworkError do
+              server.pipeline_next_responses
+            end
+          end
+        end
+
+        it 'raises NetworkError when called after pipeline_abort' do
+          memcached_persistent(p) do |dc|
+            server = dc.send(:ring).servers.first
+            server.request(:pipelined_get, %w[a b])
+            server.pipeline_response_setup
+            server.pipeline_abort
+            assert_raises Dalli::NetworkError do
+              server.pipeline_next_responses
+            end
+          end
+        end
+      end
+    end
+  end
+end
diff --git a/test/integration/test_quiet.rb b/test/integration/test_quiet.rb
new file mode 100644
index 0000000..0c980ca
--- /dev/null
+++ b/test/integration/test_quiet.rb
@@ -0,0 +1,283 @@
+# frozen_string_literal: true
+
+require_relative '../helper'
+
+describe 'Quiet behavior' do
+  MemcachedManager.supported_protocols.each do |p|
+    describe "using the #{p} protocol" do
+      it 'supports the use of set in a quiet block' do
+        memcached_persistent(p) do |dc|
+          dc.close
+          dc.flush
+          key = SecureRandom.hex(3)
+          value = SecureRandom.hex(3)
+
+          refute_operator Thread.current, :[], Dalli::QUIET
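+          # inside a #quiet block Dalli sets the Dalli::QUIET thread-local and sends
+          # storage commands without waiting for a response, so each call returns nil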
+          dc.quiet do
+            assert_operator Thread.current, :[], Dalli::QUIET
+
+            # Response should be nil
+            assert_nil dc.set(key, value)
+          end
+
+          refute_operator Thread.current, :[], Dalli::QUIET
+
+          assert_equal value, dc.get(key)
+        end
+      end
+
+      it 'supports the use of add in a quiet block' do
+        memcached_persistent(p) do |dc|
+          dc.close
+          dc.flush
+          key = SecureRandom.hex(3)
+          existing = SecureRandom.hex(4)
+          oldvalue = SecureRandom.hex(3)
+          value = SecureRandom.hex(3)
+          dc.set(existing, oldvalue)
+
+          refute_operator Thread.current, :[], Dalli::QUIET
+          dc.quiet do
+            assert_operator Thread.current, :[], Dalli::QUIET
+
+            # Response should be nil
+            assert_nil dc.add(key, value)
+
+            # Should handle error case without error or unexpected behavior
+            assert_nil dc.add(existing, value)
+          end
+
+          refute_operator Thread.current, :[], Dalli::QUIET
+
+          assert_equal value, dc.get(key)
+          assert_equal oldvalue, dc.get(existing)
+        end
+      end
+
+      it 'supports the use of replace in a quiet block' do
+        memcached_persistent(p) do |dc|
+          dc.close
+          dc.flush
+          key = SecureRandom.hex(3)
+          nonexistent = SecureRandom.hex(4)
+          oldvalue = SecureRandom.hex(3)
+          value = SecureRandom.hex(3)
+          dc.set(key, oldvalue)
+
+          refute_operator Thread.current, :[], Dalli::QUIET
+          dc.quiet do
+            assert_operator Thread.current, :[], Dalli::QUIET
+
+            # Response should be nil
+            assert_nil dc.replace(key, value)
+
+            # Should handle error case without error or unexpected behavior
+            assert_nil dc.replace(nonexistent, value)
+          end
+
+          refute_operator Thread.current, :[], Dalli::QUIET
+
+          assert_equal value, dc.get(key)
+          assert_nil dc.get(nonexistent)
+        end
+      end
+
+      it 'supports the use of delete in a quiet block' do
+        memcached_persistent(p) do |dc|
+          dc.close
+          dc.flush
+          key = SecureRandom.hex(3)
+          existing = SecureRandom.hex(4)
+          value = SecureRandom.hex(3)
+          dc.set(existing, value)
+
+          refute_operator Thread.current, :[], Dalli::QUIET
+          dc.quiet do
+            assert_operator Thread.current, :[], Dalli::QUIET
+
+            # Response should be nil
+            assert_nil dc.delete(existing)
+
+            # Should handle error case without error or unexpected behavior
+            assert_nil dc.delete(key)
+          end
+
+          refute_operator Thread.current, :[], Dalli::QUIET
+
+          assert_nil dc.get(existing)
+          assert_nil dc.get(key)
+        end
+      end
+
+      it 'supports the use of append in a quiet block' do
+        memcached_persistent(p) do |dc|
+          dc.close
+          dc.flush
+          key = SecureRandom.hex(3)
+          value = SecureRandom.hex(3)
+          dc.set(key, value, 90, raw: true)
+
+          refute_operator Thread.current, :[], Dalli::QUIET
+          dc.quiet do
+            assert_operator Thread.current, :[], Dalli::QUIET
+
+            # Response should be nil
+            assert_nil dc.append(key, 'abc')
+          end
+
+          refute_operator Thread.current, :[], Dalli::QUIET
+
+          assert_equal "#{value}abc", dc.get(key)
+        end
+      end
+
+      it 'supports the use of prepend in a quiet block' do
+        memcached_persistent(p) do |dc|
+          dc.close
+          dc.flush
+          key = SecureRandom.hex(3)
+          value = SecureRandom.hex(3)
+          dc.set(key, value, 90, raw: true)
+
+          refute_operator Thread.current, :[], Dalli::QUIET
+          dc.quiet do
+            assert_operator Thread.current, :[], Dalli::QUIET
+
+            # Response should be nil
+            assert_nil dc.prepend(key, 'abc')
+          end
+
+          refute_operator Thread.current, :[], Dalli::QUIET
+
+          assert_equal "abc#{value}", dc.get(key)
+        end
+      end
+
+      it 'supports the use of incr in a quiet block' do
+        memcached_persistent(p) do |dc|
+          dc.close
+          dc.flush
+          key = SecureRandom.hex(3)
+          value = 546
+          incr = 134
+          dc.set(key, value, 90, raw: true)
+
+          refute_operator Thread.current, :[], Dalli::QUIET
+          dc.quiet do
+            assert_operator Thread.current, :[], Dalli::QUIET
+
+            # Response should be nil
+            assert_nil dc.incr(key, incr)
+          end
+
+          refute_operator Thread.current, :[], Dalli::QUIET
+
+          assert_equal 680, dc.get(key).to_i
+        end
+      end
+
+      it 'supports the use of decr in a quiet block' do
+        memcached_persistent(p) do |dc|
+          dc.close
+          dc.flush
+          key = SecureRandom.hex(3)
+          value = 546
+          incr = 134
+          dc.set(key, value, 90, raw: true)
+
+          refute_operator Thread.current, :[], Dalli::QUIET
+          dc.quiet do
+            assert_operator Thread.current, :[], Dalli::QUIET
+
+            # Response should be nil
+            assert_nil dc.decr(key, incr)
+          end
+
+          assert_equal 412, dc.get(key).to_i
+        end
+      end
+
+      it 'supports the use of flush in a quiet block' do
+        memcached_persistent(p) do |dc|
+          dc.close
+          dc.flush
+
+          refute_operator Thread.current, :[], Dalli::QUIET
+          dc.quiet do
+            assert_operator Thread.current, :[], Dalli::QUIET
+
+            # Response should be a non-empty array of nils
+            arr = dc.flush(90)
+
+            assert_equal 2, arr.size
+            assert arr.all?(&:nil?)
+          end
+
+          refute_operator Thread.current, :[], Dalli::QUIET
+        end
+      end
+
+      it 'does not corrupt the underlying response buffer when a memcached error occurs in a quiet block' do
+        memcached_persistent(p) do |dc|
+          dc.close
+          dc.flush
+          dc.set('a', 'av')
+          dc.set('b', 'bv')
+
+          assert_equal 'av', dc.get('a')
+          assert_equal 'bv', dc.get('b')
+
+          refute_operator Thread.current, :[], Dalli::QUIET
+          dc.multi do
+            assert_operator Thread.current, :[], Dalli::QUIET
+            dc.delete('non_existent_key')
+          end
+
+          refute_operator Thread.current, :[], Dalli::QUIET
+          assert_equal 'av', dc.get('a')
+          assert_equal 'bv', dc.get('b')
+        end
+      end
+
+      it 'raises an error if an invalid operation is used in a multi block' do
+        memcached_persistent(p) do |dc|
+          dc.close
+          dc.flush
+          dc.set('a', 'av')
+          dc.set('b', 'bv')
+
+          assert_equal 'av', dc.get('a')
+          assert_equal 'bv', dc.get('b')
+
+          refute_operator Thread.current, :[], Dalli::QUIET
+          dc.multi do
+            assert_operator Thread.current, :[], Dalli::QUIET
+            assert_raises Dalli::NotPermittedMultiOpError do
+              dc.get('a')
+            end
+          end
+
+          refute_operator Thread.current, :[], Dalli::QUIET
+        end
+      end
+
+      describe 'quiet? method' do
+        it 'has protocol instances that respond to quiet?' do
+          memcached_persistent(p) do |dc|
+            s = dc.send(:ring).servers.first
+
+            assert_respond_to s, :quiet?
+          end
+        end
+
+        it 'has protocol instances that respond to multi?' do
+          memcached_persistent(p) do |dc|
+            s = dc.send(:ring).servers.first
+
+            assert_respond_to s, :multi?
+          end
+        end
+      end
+    end
+  end
+end
diff --git a/test/integration/test_sasl.rb b/test/integration/test_sasl.rb
new file mode 100644
index 0000000..9b762f2
--- /dev/null
+++ b/test/integration/test_sasl.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+require_relative '../helper'
+
+# This is a binary protocol only set of tests
+describe 'Sasl' do
+  def self.sasl_it(msg, &block)
+    it(msg, &block) if ENV['RUN_SASL_TESTS']
+  end
+
+  describe 'when the server is configured to require authentication' do
+    before do
+      @server = Minitest::Mock.new
+      @server.expect(:request, true)
+      @server.expect(:weight, 1)
+      @server.expect(:name, 'localhost:19124')
+    end
+
+    describe 'with incorrect authentication credentials' do
+      describe 'from the environment variables' do
+        before do
+          ENV['MEMCACHE_USERNAME'] = 'foo'
+          ENV['MEMCACHE_PASSWORD'] = 'wrongpwd'
+        end
+
+        after do
+          ENV['MEMCACHE_USERNAME'] = nil
+          ENV['MEMCACHE_PASSWORD'] = nil
+        end
+
+        sasl_it 'fails and raises the expected error' do
+          memcached_sasl_persistent do |_, port|
+            dc = Dalli::Client.new("localhost:#{port}")
+            assert_error Dalli::DalliError, /0x20/ do
+              dc.set('abc', 123)
+            end
+          end
+        end
+      end
+
+      describe 'passed in as options' do
+        sasl_it 'fails and raises the expected error' do
+          memcached_sasl_persistent do |_, port|
+            dc = Dalli::Client.new("localhost:#{port}", username: 'foo', password: 'wrongpwd')
+            assert_error Dalli::DalliError, /0x20/ do
+              dc.set('abc', 123)
+            end
+          end
+        end
+      end
+    end
+
+    # OSX: Create a SASL user for the memcached application like so:
+    #
+    # saslpasswd2 -a memcached -c testuser
+    #
+    # with password 'testtest'
+    describe 'in an authenticated environment' do
+      before do
+        ENV['MEMCACHE_USERNAME'] = 'testuser'
+        ENV['MEMCACHE_PASSWORD'] = 'testtest'
+      end
+
+      after do
+        ENV['MEMCACHE_USERNAME'] = nil
+        ENV['MEMCACHE_PASSWORD'] = nil
+      end
+
+      sasl_it 'passes SASL authentication' do
+        memcached_sasl_persistent do |dc|
+          # I get "Dalli::DalliError: Error authenticating: 0x20" in OSX
+          # but SASL works on Heroku servers. YMMV.
+          assert dc.set('abc', 123)
+          assert_equal 123, dc.get('abc')
+        end
+      end
+    end
+
+    sasl_it 'passes SASL authentication with options' do
+      memcached_sasl_persistent do |_, port|
+        dc = Dalli::Client.new("localhost:#{port}", sasl_credentials)
+        # I get "Dalli::DalliError: Error authenticating: 32" in OSX
+        # but SASL works on Heroku servers. YMMV.
+        assert dc.set('abc', 123)
+        assert_equal 123, dc.get('abc')
+      end
+    end
+  end
+end
diff --git a/test/integration/test_serializer.rb b/test/integration/test_serializer.rb
new file mode 100644
index 0000000..46dff8a
--- /dev/null
+++ b/test/integration/test_serializer.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require_relative '../helper'
+require 'json'
+
+describe 'Serializer configuration' do
+  MemcachedManager.supported_protocols.each do |p|
+    describe "using the #{p} protocol" do
+      it 'defaults to Marshal' do
+        memcached(p, 29_198) do |dc|
+          dc.set 1, 2
+
+          assert_equal Marshal, dc.instance_variable_get(:@ring).servers.first.serializer
+        end
+      end
+
+      it 'supports a custom serializer' do
+        memcached(p, 29_198) do |_dc, port|
+          memcache = Dalli::Client.new("127.0.0.1:#{port}", serializer: JSON)
+          memcache.set 1, 2
+          begin
+            assert_equal JSON, memcache.instance_variable_get(:@ring).servers.first.serializer
+
+            memcached(p, 21_956) do |newdc|
+              assert newdc.set('json_test', { 'foo' => 'bar' })
+              assert_equal({ 'foo' => 'bar' }, newdc.get('json_test'))
+            end
+          end
+        end
+      end
+    end
+  end
+end
diff --git a/test/integration/test_ttl.rb b/test/integration/test_ttl.rb
new file mode 100644
index 0000000..12b6578
--- /dev/null
+++ b/test/integration/test_ttl.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require_relative '../helper'
+
+describe 'TTL behavior' do
+  MemcachedManager.supported_protocols.each do |p|
+    describe "using the #{p} protocol" do
+      it 'raises an error with an invalid client-level expires_in' do
+        bad_data = [{ bad: 'expires in data' }, Hash, [1, 2, 3]]
+
+        bad_data.each do |bad|
+          assert_raises ArgumentError do
+            Dalli::Client.new('foo', { expires_in: bad })
+          end
+        end
+      end
+
+      it 'supports a TTL on set' do
+        memcached_persistent(p) do |dc|
+          key = 'foo'
+
+          assert dc.set(key, 'bar', 1)
+          assert_equal 'bar', dc.get(key)
+          sleep 1.2
+
+          assert_nil dc.get(key)
+        end
+      end
+
+      it 'generates an ArgumentError for a ttl that does not support to_i' do
+        memcached_persistent(p) do |dc|
+          assert_raises ArgumentError do
+            dc.set('foo', 'bar', [])
+          end
+        end
+      end
+    end
+  end
+end
diff --git a/test/protocol/meta/test_request_formatter.rb b/test/protocol/meta/test_request_formatter.rb
new file mode 100644
index 0000000..ce8958a
--- /dev/null
+++ b/test/protocol/meta/test_request_formatter.rb
@@ -0,0 +1,245 @@
+# frozen_string_literal: true
+
+require_relative '../../helper'
+
+describe Dalli::Protocol::Meta::RequestFormatter do
+  describe 'meta_get' do
+    let(:key) { SecureRandom.hex(4) }
+    let(:ttl) { rand(1000..1999) }
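+    # flag letters in the expected strings follow the memcached meta protocol:
+    # v returns the value, f the client bitflags, c the CAS value, k the key,
+    # s the item size, q marks the request quiet, and T<ttl> touches the TTL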
+
+    it 'returns the default get (get value and bitflags, no cas) when passed only a key' do
+      assert_equal "mg #{key} v f\r\n", Dalli::Protocol::Meta::RequestFormatter.meta_get(key: key)
+    end
+
+    it 'sets the TTL flag when passed a ttl' do
+      assert_equal "mg #{key} v f T#{ttl}\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_get(key: key, ttl: ttl)
+    end
+
+    it 'skips the value and bitflags when passed a pure touch argument' do
+      assert_equal "mg #{key} T#{ttl}\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_get(key: key, value: false, ttl: ttl)
+    end
+
+    it 'sets the CAS retrieval flags when passed that value' do
+      assert_equal "mg #{key} c\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_get(key: key, value: false, return_cas: true)
+    end
+
+    it 'sets the flags for returning the key and body size when passed quiet' do
+      assert_equal "mg #{key} v f k q s\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_get(key: key, quiet: true)
+    end
+  end
+
+  describe 'meta_set' do
+    let(:key) { SecureRandom.hex(4) }
+    let(:hexlen) { rand(500..999) }
+    let(:val) { SecureRandom.hex(hexlen) }
+    let(:bitflags) { (0..3).to_a.sample }
+    let(:cas) { rand(500..999) }
+    let(:ttl) { rand(500..999) }
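+    # ms requests are "ms <key> <datalen> <flags>": c returns the CAS value,
+    # F<bitflags> carries client flags, T<ttl> the expiry, C<cas> a compare-and-swap
+    # check, b marks a base64 key, q is quiet mode, and M<mode> selects
+    # set/add/replace/append/prepend (MS/ME/MR/MA/MP)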
+
+    it 'returns the default (treat as a set, no CAS check) when just passed key, datalen, and bitflags' do
+      assert_equal "ms #{key} #{val.bytesize} c F#{bitflags} MS\r\n#{val}\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_set(key: key, value: val, bitflags: bitflags)
+    end
+
+    it 'supports the add mode' do
+      assert_equal "ms #{key} #{val.bytesize} c F#{bitflags} ME\r\n#{val}\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_set(key: key, value: val, bitflags: bitflags,
+                                                                    mode: :add)
+    end
+
+    it 'supports the replace mode' do
+      assert_equal "ms #{key} #{val.bytesize} c F#{bitflags} MR\r\n#{val}\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_set(key: key, value: val, bitflags: bitflags,
+                                                                    mode: :replace)
+    end
+
+    it 'passes a TTL if one is provided' do
+      assert_equal "ms #{key} #{val.bytesize} c F#{bitflags} T#{ttl} MS\r\n#{val}\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_set(key: key, value: val, ttl: ttl, bitflags: bitflags)
+    end
+
+    it 'omits the CAS flag on append' do
+      assert_equal "ms #{key} #{val.bytesize} MA\r\n#{val}\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_set(key: key, value: val, mode: :append)
+    end
+
+    it 'omits the CAS flag on prepend' do
+      assert_equal "ms #{key} #{val.bytesize} MP\r\n#{val}\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_set(key: key, value: val, mode: :prepend)
+    end
+
+    it 'passes a CAS if one is provided' do
+      assert_equal "ms #{key} #{val.bytesize} c F#{bitflags} C#{cas} MS\r\n#{val}\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_set(key: key, value: val, bitflags: bitflags, cas: cas)
+    end
+
+    it 'excludes CAS if set to 0' do
+      assert_equal "ms #{key} #{val.bytesize} c F#{bitflags} MS\r\n#{val}\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_set(key: key, value: val, bitflags: bitflags, cas: 0)
+    end
+
+    it 'excludes non-numeric CAS values' do
+      assert_equal "ms #{key} #{val.bytesize} c F#{bitflags} MS\r\n#{val}\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_set(key: key, value: val, bitflags: bitflags,
+                                                                    cas: "\nset importantkey 1 1000 8\ninjected")
+    end
+
+    it 'sets the quiet mode if configured' do
+      assert_equal "ms #{key} #{val.bytesize} c F#{bitflags} MS q\r\n#{val}\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_set(key: key, value: val, bitflags: bitflags,
+                                                                    quiet: true)
+    end
+
+    it 'sets the base64 mode if configured' do
+      assert_equal "ms #{key} #{val.bytesize} c b F#{bitflags} MS\r\n#{val}\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_set(key: key, value: val, bitflags: bitflags,
+                                                                    base64: true)
+    end
+  end
+
+  describe 'meta_delete' do
+    let(:key) { SecureRandom.hex(4) }
+    let(:cas) { rand(1000..1999) }
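+    # md takes the key plus optional C<cas> (CAS check), q (quiet), and b (base64 key) flags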
+
+    it 'returns the default when just passed key' do
+      assert_equal "md #{key}\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_delete(key: key)
+    end
+
+    it 'incorporates CAS when passed cas' do
+      assert_equal "md #{key} C#{cas}\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_delete(key: key, cas: cas)
+    end
+
+    it 'sets the q flag when passed quiet' do
+      assert_equal "md #{key} q\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_delete(key: key, quiet: true)
+    end
+
+    it 'excludes CAS when set to 0' do
+      assert_equal "md #{key}\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_delete(key: key, cas: 0)
+    end
+
+    it 'excludes non-numeric CAS values' do
+      assert_equal "md #{key}\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_delete(key: key,
+                                                                       cas: "\nset importantkey 1 1000 8\ninjected")
+    end
+
+    it 'sets the base64 mode if configured' do
+      assert_equal "md #{key} b\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_delete(key: key, base64: true)
+    end
+  end
+
+  describe 'meta_arithmetic' do
+    let(:key) { SecureRandom.hex(4) }
+    let(:delta) { rand(500..999) }
+    let(:initial) { rand(500..999) }
+    let(:cas) { rand(500..999) }
+    let(:ttl) { rand(500..999) }
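+    # ma flags: v returns the new value, D<delta> the delta, J<initial> and N<ttl>
+    # control auto-creation on miss, C<cas> is a CAS check, q is quiet mode,
+    # b marks a base64 key, and M<mode> picks increment (MI) or decrement (MD)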
+
+    it 'returns the expected string with the default N flag when passed non-nil key, delta, and initial' do
+      assert_equal "ma #{key} v D#{delta} J#{initial} N0 MI\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_arithmetic(key: key, delta: delta, initial: initial)
+    end
+
+    it 'excludes the J and N flags when initial is nil and ttl is not set' do
+      assert_equal "ma #{key} v D#{delta} MI\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_arithmetic(key: key, delta: delta, initial: nil)
+    end
+
+    it 'omits the D flag if delta is nil' do
+      assert_equal "ma #{key} v J#{initial} N0 MI\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_arithmetic(key: key, delta: nil, initial: initial)
+    end
+
+    it 'uses ttl for the N flag when ttl is passed explicitly along with an initial value' do
+      assert_equal "ma #{key} v D#{delta} J#{initial} N#{ttl} MI\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_arithmetic(key: key, delta: delta, initial: initial,
+                                                                           ttl: ttl)
+    end
+
+    it 'incorporates CAS when passed cas' do
+      assert_equal "ma #{key} v D#{delta} J#{initial} N0 C#{cas} MI\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_arithmetic(key: key, delta: delta, initial: initial,
+                                                                           cas: cas)
+    end
+
+    it 'excludes CAS when CAS is set to 0' do
+      assert_equal "ma #{key} v D#{delta} J#{initial} N0 MI\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_arithmetic(key: key, delta: delta, initial: initial,
+                                                                           cas: 0)
+    end
+
+    it 'includes the N flag when ttl is passed explicitly with a nil initial value' do
+      assert_equal "ma #{key} v D#{delta} N#{ttl} MI\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_arithmetic(key: key, delta: delta, initial: nil,
+                                                                           ttl: ttl)
+    end
+
+    it 'swaps from MI to MD when the incr value is explicitly false' do
+      assert_equal "ma #{key} v D#{delta} J#{initial} N0 MD\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_arithmetic(key: key, delta: delta, initial: initial,
+                                                                           incr: false)
+    end
+
+    it 'includes the quiet flag when specified' do
+      assert_equal "ma #{key} v D#{delta} J#{initial} N0 q MI\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_arithmetic(key: key, delta: delta, initial: initial,
+                                                                           quiet: true)
+    end
+
+    it 'sets the base64 mode if configured' do
+      assert_equal "ma #{key} v b D#{delta} J#{initial} N0 MI\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.meta_arithmetic(key: key, delta: delta, initial: initial,
+                                                                           base64: true)
+    end
+  end
+
+  describe 'meta_noop' do
+    it 'returns the expected string' do
+      assert_equal "mn\r\n", Dalli::Protocol::Meta::RequestFormatter.meta_noop
+    end
+  end
+
+  describe 'version' do
+    it 'returns the expected string' do
+      assert_equal "version\r\n", Dalli::Protocol::Meta::RequestFormatter.version
+    end
+  end
+
+  describe 'flush' do
+    it 'returns the expected string with no arguments' do
+      assert_equal "flush_all\r\n", Dalli::Protocol::Meta::RequestFormatter.flush
+    end
+
+    it 'adds noreply when quiet is true' do
+      assert_equal "flush_all noreply\r\n", Dalli::Protocol::Meta::RequestFormatter.flush(quiet: true)
+    end
+
+    it 'returns the expected string with a delay argument' do
+      delay = rand(1000..1999)
+
+      assert_equal "flush_all #{delay}\r\n", Dalli::Protocol::Meta::RequestFormatter.flush(delay: delay)
+    end
+
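+    # A non-numeric delay is coerced to an integer, so the injected command text collapses to 0.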
+    it 'sanitizes the delay argument' do
+      delay = "\nset importantkey 1 1000 8\ninjected"
+
+      assert_equal "flush_all 0\r\n", Dalli::Protocol::Meta::RequestFormatter.flush(delay: delay)
+    end
+
+    it 'adds noreply with a delay and quiet argument' do
+      delay = rand(1000..1999)
+
+      assert_equal "flush_all #{delay} noreply\r\n",
+                   Dalli::Protocol::Meta::RequestFormatter.flush(delay: delay, quiet: true)
+    end
+  end
+end
diff --git a/test/test_binary_protocol.rb b/test/protocol/test_binary.rb
similarity index 83%
rename from test/test_binary_protocol.rb
rename to test/protocol/test_binary.rb
index d376a4e..7a18d27 100644
--- a/test/test_binary_protocol.rb
+++ b/test/protocol/test_binary.rb
@@ -1,12 +1,13 @@
 # frozen_string_literal: true
 
 require 'ostruct'
-require_relative 'helper'
+require_relative '../helper'
 
 describe Dalli::Protocol::Binary do
   describe 'hostname parsing' do
     it 'handles unix socket with no weight' do
       s = Dalli::Protocol::Binary.new('/var/run/memcached/sock')
+
       assert_equal '/var/run/memcached/sock', s.hostname
       assert_equal 1, s.weight
       assert_equal :unix, s.socket_type
@@ -14,6 +15,7 @@ describe Dalli::Protocol::Binary do
 
     it 'handles unix socket with a weight' do
       s = Dalli::Protocol::Binary.new('/var/run/memcached/sock:2')
+
       assert_equal '/var/run/memcached/sock', s.hostname
       assert_equal 2, s.weight
       assert_equal :unix, s.socket_type
@@ -21,6 +23,7 @@ describe Dalli::Protocol::Binary do
 
     it 'handles no port or weight' do
       s = Dalli::Protocol::Binary.new('localhost')
+
       assert_equal 'localhost', s.hostname
       assert_equal 11_211, s.port
       assert_equal 1, s.weight
@@ -29,6 +32,7 @@ describe Dalli::Protocol::Binary do
 
     it 'handles a port, but no weight' do
       s = Dalli::Protocol::Binary.new('localhost:11212')
+
       assert_equal 'localhost', s.hostname
       assert_equal 11_212, s.port
       assert_equal 1, s.weight
@@ -37,6 +41,7 @@ describe Dalli::Protocol::Binary do
 
     it 'handles a port and a weight' do
       s = Dalli::Protocol::Binary.new('localhost:11212:2')
+
       assert_equal 'localhost', s.hostname
       assert_equal 11_212, s.port
       assert_equal 2, s.weight
@@ -45,6 +50,7 @@ describe Dalli::Protocol::Binary do
 
     it 'handles ipv4 addresses' do
       s = Dalli::Protocol::Binary.new('127.0.0.1')
+
       assert_equal '127.0.0.1', s.hostname
       assert_equal 11_211, s.port
       assert_equal 1, s.weight
@@ -53,6 +59,7 @@ describe Dalli::Protocol::Binary do
 
     it 'handles ipv6 addresses' do
       s = Dalli::Protocol::Binary.new('[::1]')
+
       assert_equal '::1', s.hostname
       assert_equal 11_211, s.port
       assert_equal 1, s.weight
@@ -61,6 +68,7 @@ describe Dalli::Protocol::Binary do
 
     it 'handles ipv6 addresses with port' do
       s = Dalli::Protocol::Binary.new('[::1]:11212')
+
       assert_equal '::1', s.hostname
       assert_equal 11_212, s.port
       assert_equal 1, s.weight
@@ -69,6 +77,7 @@ describe Dalli::Protocol::Binary do
 
     it 'handles ipv6 addresses with port and weight' do
       s = Dalli::Protocol::Binary.new('[::1]:11212:2')
+
       assert_equal '::1', s.hostname
       assert_equal 11_212, s.port
       assert_equal 2, s.weight
@@ -77,6 +86,7 @@ describe Dalli::Protocol::Binary do
 
     it 'handles a FQDN' do
       s = Dalli::Protocol::Binary.new('my.fqdn.com')
+
       assert_equal 'my.fqdn.com', s.hostname
       assert_equal 11_211, s.port
       assert_equal 1, s.weight
@@ -85,6 +95,7 @@ describe Dalli::Protocol::Binary do
 
     it 'handles a FQDN with port and weight' do
       s = Dalli::Protocol::Binary.new('my.fqdn.com:11212:2')
+
       assert_equal 'my.fqdn.com', s.hostname
       assert_equal 11_212, s.port
       assert_equal 2, s.weight
@@ -98,24 +109,4 @@ describe Dalli::Protocol::Binary do
       expect(-> { Dalli::Protocol::Binary.new('my.fqdn.com:11212:abc') }).must_raise Dalli::DalliError
     end
   end
-
-  describe 'multi_response_nonblock' do
-    subject { Dalli::Protocol::Binary.new('127.0.0.1') }
-
-    it 'raises NetworkError when called before multi_response_start' do
-      assert_raises Dalli::NetworkError do
-        subject.request(:send_multiget, %w[a b])
-        subject.multi_response_nonblock
-      end
-    end
-
-    it 'raises NetworkError when called after multi_response_abort' do
-      assert_raises Dalli::NetworkError do
-        subject.request(:send_multiget, %w[a b])
-        subject.multi_response_start
-        subject.multi_response_abort
-        subject.multi_response_nonblock
-      end
-    end
-  end
 end
diff --git a/test/protocol/test_server_config_parser.rb b/test/protocol/test_server_config_parser.rb
index 8afeeb7..4e12039 100644
--- a/test/protocol/test_server_config_parser.rb
+++ b/test/protocol/test_server_config_parser.rb
@@ -8,24 +8,22 @@ describe Dalli::Protocol::ServerConfigParser do
     let(:weight) { rand(1..5) }
 
     describe 'when the string is not an memcached URI' do
-      let(:options) { {} }
-
       describe 'tcp' do
         describe 'when the hostname is a domain name' do
           let(:hostname) { "a#{SecureRandom.hex(5)}.b#{SecureRandom.hex(3)}.#{%w[net com edu].sample}" }
 
           it 'parses a hostname by itself' do
-            assert_equal Dalli::Protocol::ServerConfigParser.parse(hostname, options), [hostname, 11_211, 1, :tcp, {}]
+            assert_equal Dalli::Protocol::ServerConfigParser.parse(hostname), [hostname, 11_211, :tcp, 1, {}]
           end
 
           it 'parses hostname with a port' do
-            assert_equal Dalli::Protocol::ServerConfigParser.parse("#{hostname}:#{port}", options),
-                         [hostname, port, 1, :tcp, {}]
+            assert_equal Dalli::Protocol::ServerConfigParser.parse("#{hostname}:#{port}"),
+                         [hostname, port, :tcp, 1, {}]
           end
 
           it 'parses hostname with a port and weight' do
-            assert_equal Dalli::Protocol::ServerConfigParser.parse("#{hostname}:#{port}:#{weight}", options),
-                         [hostname, port, weight, :tcp, {}]
+            assert_equal Dalli::Protocol::ServerConfigParser.parse("#{hostname}:#{port}:#{weight}"),
+                         [hostname, port, :tcp, weight, {}]
           end
         end
 
@@ -33,17 +31,17 @@ describe Dalli::Protocol::ServerConfigParser do
           let(:hostname) { '203.0.113.28' }
 
           it 'parses a hostname by itself' do
-            assert_equal Dalli::Protocol::ServerConfigParser.parse(hostname, options), [hostname, 11_211, 1, :tcp, {}]
+            assert_equal Dalli::Protocol::ServerConfigParser.parse(hostname), [hostname, 11_211, :tcp, 1, {}]
           end
 
           it 'parses hostname with a port' do
-            assert_equal Dalli::Protocol::ServerConfigParser.parse("#{hostname}:#{port}", options),
-                         [hostname, port, 1, :tcp, {}]
+            assert_equal Dalli::Protocol::ServerConfigParser.parse("#{hostname}:#{port}"),
+                         [hostname, port, :tcp, 1, {}]
           end
 
           it 'parses hostname with a port and weight' do
-            assert_equal Dalli::Protocol::ServerConfigParser.parse("#{hostname}:#{port}:#{weight}", options),
-                         [hostname, port, weight, :tcp, {}]
+            assert_equal Dalli::Protocol::ServerConfigParser.parse("#{hostname}:#{port}:#{weight}"),
+                         [hostname, port, :tcp, weight, {}]
           end
         end
 
@@ -51,18 +49,18 @@ describe Dalli::Protocol::ServerConfigParser do
           let(:hostname) { ['2001:db8:ffff:ffff:ffff:ffff:ffff:ffff', '2001:db8::'].sample }
 
           it 'parses a hostname by itself' do
-            assert_equal Dalli::Protocol::ServerConfigParser.parse("[#{hostname}]", options),
-                         [hostname, 11_211, 1, :tcp, {}]
+            assert_equal Dalli::Protocol::ServerConfigParser.parse("[#{hostname}]"),
+                         [hostname, 11_211, :tcp, 1, {}]
           end
 
           it 'parses hostname with a port' do
-            assert_equal Dalli::Protocol::ServerConfigParser.parse("[#{hostname}]:#{port}", options),
-                         [hostname, port, 1, :tcp, {}]
+            assert_equal Dalli::Protocol::ServerConfigParser.parse("[#{hostname}]:#{port}"),
+                         [hostname, port, :tcp, 1, {}]
           end
 
           it 'parses hostname with a port and weight' do
-            assert_equal Dalli::Protocol::ServerConfigParser.parse("[#{hostname}]:#{port}:#{weight}", options),
-                         [hostname, port, weight, :tcp, {}]
+            assert_equal Dalli::Protocol::ServerConfigParser.parse("[#{hostname}]:#{port}:#{weight}"),
+                         [hostname, port, :tcp, weight, {}]
           end
         end
       end
@@ -71,18 +69,19 @@ describe Dalli::Protocol::ServerConfigParser do
         let(:hostname) { "/tmp/#{SecureRandom.hex(5)}" }
 
         it 'parses a socket by itself' do
-          assert_equal Dalli::Protocol::ServerConfigParser.parse(hostname, {}), [hostname, nil, 1, :unix, {}]
+          assert_equal Dalli::Protocol::ServerConfigParser.parse(hostname), [hostname, nil, :unix, 1, {}]
         end
 
         it 'parses socket with a weight' do
-          assert_equal Dalli::Protocol::ServerConfigParser.parse("#{hostname}:#{weight}", {}),
-                       [hostname, nil, weight, :unix, {}]
+          assert_equal Dalli::Protocol::ServerConfigParser.parse("#{hostname}:#{weight}"),
+                       [hostname, nil, :unix, weight, {}]
         end
 
         it 'produces an error with a port and weight' do
           err = assert_raises Dalli::DalliError do
-            Dalli::Protocol::ServerConfigParser.parse("#{hostname}:#{port}:#{weight}", {})
+            Dalli::Protocol::ServerConfigParser.parse("#{hostname}:#{port}:#{weight}")
           end
+
           assert_equal err.message, "Could not parse hostname #{hostname}:#{port}:#{weight}"
         end
       end
@@ -97,37 +96,18 @@ describe Dalli::Protocol::ServerConfigParser do
       describe 'when the URI is properly formed and includes all values' do
         let(:uri) { "memcached://#{user}:#{password}@#{hostname}:#{port}" }
 
-        describe 'when the client options are empty' do
-          let(:client_options) { {} }
-
-          it 'parses correctly' do
-            assert_equal Dalli::Protocol::ServerConfigParser.parse(uri, client_options),
-                         [hostname, port, 1, :tcp, { username: user, password: password }]
-          end
-        end
-
-        describe 'when the client options are not empty' do
-          let(:option_a) { SecureRandom.hex(3) }
-          let(:option_b) { SecureRandom.hex(3) }
-          let(:client_options) { { a: option_a, b: option_b } }
-
-          it 'parses correctly' do
-            assert_equal Dalli::Protocol::ServerConfigParser.parse(uri, client_options),
-                         [hostname, port, 1, :tcp, { username: user, password: password, a: option_a, b: option_b }]
-          end
+        it 'parses correctly' do
+          assert_equal Dalli::Protocol::ServerConfigParser.parse(uri),
+                       [hostname, port, :tcp, 1, { username: user, password: password }]
         end
       end
 
       describe 'when the URI does not include a port' do
         let(:uri) { "memcached://#{user}:#{password}@#{hostname}" }
 
-        describe 'when the client options are empty' do
-          let(:client_options) { {} }
-
-          it 'parses correctly' do
-            assert_equal Dalli::Protocol::ServerConfigParser.parse(uri, client_options),
-                         [hostname, 11_211, 1, :tcp, { username: user, password: password }]
-          end
+        it 'parses correctly' do
+          assert_equal Dalli::Protocol::ServerConfigParser.parse(uri),
+                       [hostname, 11_211, :tcp, 1, { username: user, password: password }]
         end
       end
     end
@@ -136,8 +116,9 @@ describe Dalli::Protocol::ServerConfigParser do
       describe 'when the string is empty' do
         it 'produces an error' do
           err = assert_raises Dalli::DalliError do
-            Dalli::Protocol::ServerConfigParser.parse('', {})
+            Dalli::Protocol::ServerConfigParser.parse('')
           end
+
           assert_equal('Could not parse hostname ', err.message)
         end
       end
@@ -145,8 +126,9 @@ describe Dalli::Protocol::ServerConfigParser do
       describe 'when the string starts with a colon' do
         it 'produces an error' do
           err = assert_raises Dalli::DalliError do
-            Dalli::Protocol::ServerConfigParser.parse(':1:2', {})
+            Dalli::Protocol::ServerConfigParser.parse(':1:2')
           end
+
           assert_equal('Could not parse hostname :1:2', err.message)
         end
       end
@@ -154,8 +136,9 @@ describe Dalli::Protocol::ServerConfigParser do
       describe 'when the string ends with a colon' do
         it 'produces an error' do
           err = assert_raises Dalli::DalliError do
-            Dalli::Protocol::ServerConfigParser.parse('abc.com:', {})
+            Dalli::Protocol::ServerConfigParser.parse('abc.com:')
           end
+
           assert_equal('Could not parse hostname abc.com:', err.message)
         end
       end
diff --git a/test/protocol/test_value_compressor.rb b/test/protocol/test_value_compressor.rb
index dc72f89..ed1d2b5 100644
--- a/test/protocol/test_value_compressor.rb
+++ b/test/protocol/test_value_compressor.rb
@@ -11,14 +11,14 @@ describe Dalli::Protocol::ValueCompressor do
         let(:options) { {} }
 
         it 'defaults to true' do
-          assert subject.compress_by_default?
+          assert_predicate subject, :compress_by_default?
         end
 
         describe 'when the deprecated compression option is used' do
           let(:options) { { compression: false } }
 
           it 'overrides the default' do
-            refute subject.compress_by_default?
+            refute_predicate subject, :compress_by_default?
           end
         end
       end
@@ -27,14 +27,14 @@ describe Dalli::Protocol::ValueCompressor do
         let(:options) { { compress: true } }
 
         it 'is true' do
-          assert subject.compress_by_default?
+          assert_predicate subject, :compress_by_default?
         end
 
         describe 'when the deprecated compression option is used' do
           let(:options) { { compress: true, compression: false } }
 
           it 'does not override the explicit compress options' do
-            assert subject.compress_by_default?
+            assert_predicate subject, :compress_by_default?
           end
         end
       end
@@ -43,14 +43,14 @@ describe Dalli::Protocol::ValueCompressor do
         let(:options) { { compress: false } }
 
         it 'is false' do
-          refute subject.compress_by_default?
+          refute_predicate subject, :compress_by_default?
         end
 
         describe 'when the deprecated compression option is used' do
           let(:options) { { compress: false, compression: true } }
 
           it 'does not override the explicit compress options' do
-            refute subject.compress_by_default?
+            refute_predicate subject, :compress_by_default?
           end
         end
       end
@@ -61,7 +61,7 @@ describe Dalli::Protocol::ValueCompressor do
         let(:options) { {} }
 
         it 'defaults to Dalli::Compressor' do
-          assert_equal subject.compressor, ::Dalli::Compressor
+          assert_equal subject.compressor, Dalli::Compressor
         end
       end
 
@@ -116,6 +116,7 @@ describe Dalli::Protocol::ValueCompressor do
 
               it 'does not compress the argument' do
                 val, newbitflags = vc.store(raw_value, req_options, bitflags)
+
                 assert_equal val, raw_value
                 assert_equal newbitflags, bitflags
                 compressor.verify
@@ -127,6 +128,7 @@ describe Dalli::Protocol::ValueCompressor do
 
               it 'does not compress the argument' do
                 val, newbitflags = vc.store(raw_value, req_options, bitflags)
+
                 assert_equal val, raw_value
                 assert_equal newbitflags, bitflags
                 compressor.verify
@@ -138,6 +140,7 @@ describe Dalli::Protocol::ValueCompressor do
 
               it 'does not compress the argument' do
                 val, newbitflags = vc.store(raw_value, req_options, bitflags)
+
                 assert_equal val, raw_value
                 assert_equal newbitflags, bitflags
                 compressor.verify
@@ -153,6 +156,7 @@ describe Dalli::Protocol::ValueCompressor do
 
               it 'does not compress the argument' do
                 val, newbitflags = vc.store(raw_value, req_options, bitflags)
+
                 assert_equal val, raw_value
                 assert_equal newbitflags, bitflags
                 compressor.verify
@@ -164,6 +168,7 @@ describe Dalli::Protocol::ValueCompressor do
 
               it 'does not compress the argument' do
                 val, newbitflags = vc.store(raw_value, req_options, bitflags)
+
                 assert_equal val, raw_value
                 assert_equal newbitflags, bitflags
                 compressor.verify
@@ -175,6 +180,7 @@ describe Dalli::Protocol::ValueCompressor do
 
               it 'does not compress the argument' do
                 val, newbitflags = vc.store(raw_value, req_options, bitflags)
+
                 assert_equal val, raw_value
                 assert_equal newbitflags, bitflags
                 compressor.verify
@@ -195,6 +201,7 @@ describe Dalli::Protocol::ValueCompressor do
               it 'compresses the argument' do
                 compressor.expect :compress, compressed_dummy, [raw_value]
                 val, newbitflags = vc.store(raw_value, req_options, bitflags)
+
                 assert_equal val, compressed_dummy
                 assert_equal newbitflags, (bitflags | 0x2)
                 compressor.verify
@@ -207,6 +214,7 @@ describe Dalli::Protocol::ValueCompressor do
               it 'compresses the argument' do
                 compressor.expect :compress, compressed_dummy, [raw_value]
                 val, newbitflags = vc.store(raw_value, req_options, bitflags)
+
                 assert_equal val, compressed_dummy
                 assert_equal newbitflags, (bitflags | 0x2)
                 compressor.verify
@@ -218,6 +226,7 @@ describe Dalli::Protocol::ValueCompressor do
 
               it 'does not compress the argument' do
                 val, newbitflags = vc.store(raw_value, req_options, bitflags)
+
                 assert_equal val, raw_value
                 assert_equal newbitflags, bitflags
                 compressor.verify
@@ -233,6 +242,7 @@ describe Dalli::Protocol::ValueCompressor do
 
               it 'does not compress the argument' do
                 val, newbitflags = vc.store(raw_value, req_options, bitflags)
+
                 assert_equal val, raw_value
                 assert_equal newbitflags, bitflags
                 compressor.verify
@@ -245,6 +255,7 @@ describe Dalli::Protocol::ValueCompressor do
               it 'compresses the argument' do
                 compressor.expect :compress, compressed_dummy, [raw_value]
                 val, newbitflags = vc.store(raw_value, req_options, bitflags)
+
                 assert_equal val, compressed_dummy
                 assert_equal newbitflags, (bitflags | 0x2)
                 compressor.verify
@@ -256,6 +267,7 @@ describe Dalli::Protocol::ValueCompressor do
 
               it 'does not compress the argument' do
                 val, newbitflags = vc.store(raw_value, req_options, bitflags)
+
                 assert_equal val, raw_value
                 assert_equal newbitflags, bitflags
                 compressor.verify
@@ -280,6 +292,7 @@ describe Dalli::Protocol::ValueCompressor do
 
               it 'does not compress the argument' do
                 val, newbitflags = vc.store(raw_value, req_options, bitflags)
+
                 assert_equal val, raw_value
                 assert_equal newbitflags, bitflags
                 compressor.verify
@@ -291,6 +304,7 @@ describe Dalli::Protocol::ValueCompressor do
 
               it 'does not compress the argument' do
                 val, newbitflags = vc.store(raw_value, req_options, bitflags)
+
                 assert_equal val, raw_value
                 assert_equal newbitflags, bitflags
                 compressor.verify
@@ -302,6 +316,7 @@ describe Dalli::Protocol::ValueCompressor do
 
               it 'does not compress the argument' do
                 val, newbitflags = vc.store(raw_value, req_options, bitflags)
+
                 assert_equal val, raw_value
                 assert_equal newbitflags, bitflags
                 compressor.verify
@@ -317,6 +332,7 @@ describe Dalli::Protocol::ValueCompressor do
 
               it 'does not compress the argument' do
                 val, newbitflags = vc.store(raw_value, req_options, bitflags)
+
                 assert_equal val, raw_value
                 assert_equal newbitflags, bitflags
                 compressor.verify
@@ -328,6 +344,7 @@ describe Dalli::Protocol::ValueCompressor do
 
               it 'does not compress the argument' do
                 val, newbitflags = vc.store(raw_value, req_options, bitflags)
+
                 assert_equal val, raw_value
                 assert_equal newbitflags, bitflags
                 compressor.verify
@@ -339,6 +356,7 @@ describe Dalli::Protocol::ValueCompressor do
 
               it 'does not compress the argument' do
                 val, newbitflags = vc.store(raw_value, req_options, bitflags)
+
                 assert_equal val, raw_value
                 assert_equal newbitflags, bitflags
                 compressor.verify
@@ -359,6 +377,7 @@ describe Dalli::Protocol::ValueCompressor do
               it 'compresses the argument' do
                 compressor.expect :compress, compressed_dummy, [raw_value]
                 val, newbitflags = vc.store(raw_value, req_options, bitflags)
+
                 assert_equal val, compressed_dummy
                 assert_equal newbitflags, (bitflags | 0x2)
                 compressor.verify
@@ -371,6 +390,7 @@ describe Dalli::Protocol::ValueCompressor do
               it 'compresses the argument' do
                 compressor.expect :compress, compressed_dummy, [raw_value]
                 val, newbitflags = vc.store(raw_value, req_options, bitflags)
+
                 assert_equal val, compressed_dummy
                 assert_equal newbitflags, (bitflags | 0x2)
                 compressor.verify
@@ -382,6 +402,7 @@ describe Dalli::Protocol::ValueCompressor do
 
               it 'does not compress the argument' do
                 val, newbitflags = vc.store(raw_value, req_options, bitflags)
+
                 assert_equal val, raw_value
                 assert_equal newbitflags, bitflags
                 compressor.verify
@@ -397,6 +418,7 @@ describe Dalli::Protocol::ValueCompressor do
 
               it 'does not compress the argument' do
                 val, newbitflags = vc.store(raw_value, req_options, bitflags)
+
                 assert_equal val, raw_value
                 assert_equal newbitflags, bitflags
                 compressor.verify
@@ -409,6 +431,7 @@ describe Dalli::Protocol::ValueCompressor do
               it 'compresses the argument' do
                 compressor.expect :compress, compressed_dummy, [raw_value]
                 val, newbitflags = vc.store(raw_value, req_options, bitflags)
+
                 assert_equal val, compressed_dummy
                 assert_equal newbitflags, (bitflags | 0x2)
                 compressor.verify
@@ -420,6 +443,7 @@ describe Dalli::Protocol::ValueCompressor do
 
               it 'does not compress the argument' do
                 val, newbitflags = vc.store(raw_value, req_options, bitflags)
+
                 assert_equal val, raw_value
                 assert_equal newbitflags, bitflags
                 compressor.verify
@@ -443,6 +467,7 @@ describe Dalli::Protocol::ValueCompressor do
       it 'should return the value without decompressing' do
         bitflags = rand(32)
         bitflags &= 0xFFFD
+
         assert_equal(0, bitflags & 0x2)
         assert_equal vc.retrieve(raw_value, bitflags), raw_value
         compressor.verify
@@ -457,6 +482,7 @@ describe Dalli::Protocol::ValueCompressor do
         compressor.expect :decompress, decompressed_dummy, [raw_value]
         bitflags = rand(32)
         bitflags |= 0x2
+
         assert_equal(0x2, bitflags & 0x2)
         assert_equal vc.retrieve(raw_value, bitflags), decompressed_dummy
         compressor.verify
@@ -469,13 +495,15 @@ describe Dalli::Protocol::ValueCompressor do
 
       it 'translates that into a UnmarshalError' do
         error = ->(_arg) { raise Zlib::Error, error_message }
-        ::Dalli::Compressor.stub :decompress, error do
+        Dalli::Compressor.stub :decompress, error do
           bitflags = rand(32)
           bitflags |= 0x2
+
           assert_equal(0x2, bitflags & 0x2)
           exception = assert_raises Dalli::UnmarshalError do
             vc.retrieve(raw_value, bitflags)
           end
+
           assert_equal exception.message, "Unable to uncompress value: #{error_message}"
         end
       end
diff --git a/test/protocol/test_value_marshaller.rb b/test/protocol/test_value_marshaller.rb
index 9822236..ab12210 100644
--- a/test/protocol/test_value_marshaller.rb
+++ b/test/protocol/test_value_marshaller.rb
@@ -31,8 +31,8 @@ describe Dalli::Protocol::ValueMarshaller do
     let(:client_options) { {} }
     let(:val) { SecureRandom.hex(4096) }
     let(:serialized_value) { Marshal.dump(val) }
-    let(:compressed_serialized_value) { ::Dalli::Compressor.compress(serialized_value) }
-    let(:compressed_raw_value) { ::Dalli::Compressor.compress(val) }
+    let(:compressed_serialized_value) { Dalli::Compressor.compress(serialized_value) }
+    let(:compressed_raw_value) { Dalli::Compressor.compress(val) }
     let(:key) { SecureRandom.hex(5) }
 
     describe 'when the bytesize is under value_max_bytes' do
@@ -90,6 +90,7 @@ describe Dalli::Protocol::ValueMarshaller do
             exception = assert_raises Dalli::ValueOverMaxSize do
               marshaller.store(key, val, req_options)
             end
+
             assert_equal "Value for #{key} over max size: #{1024 * 1024} <= #{compressed_serialized_value.size}",
                          exception.message
           end
@@ -122,6 +123,7 @@ describe Dalli::Protocol::ValueMarshaller do
             exception = assert_raises Dalli::ValueOverMaxSize do
               marshaller.store(key, val, req_options)
             end
+
             assert_equal "Value for #{key} over max size: #{1024 * 1024} <= #{compressed_raw_value.size}",
                          exception.message
           end
@@ -159,6 +161,7 @@ describe Dalli::Protocol::ValueMarshaller do
             exception = assert_raises Dalli::ValueOverMaxSize do
               marshaller.store(key, val, req_options)
             end
+
             assert_equal "Value for #{key} over max size: #{value_max_bytes} <= #{compressed_serialized_value.size}",
                          exception.message
           end
@@ -183,6 +186,7 @@ describe Dalli::Protocol::ValueMarshaller do
             exception = assert_raises Dalli::ValueOverMaxSize do
               marshaller.store(key, val, req_options)
             end
+
             assert_equal "Value for #{key} over max size: #{value_max_bytes} <= #{compressed_raw_value.size}",
                          exception.message
           end
@@ -203,8 +207,8 @@ describe Dalli::Protocol::ValueMarshaller do
     let(:marshaller) { Dalli::Protocol::ValueMarshaller.new({}) }
     let(:val) { SecureRandom.hex(4096) }
     let(:serialized_value) { Marshal.dump(val) }
-    let(:compressed_serialized_value) { ::Dalli::Compressor.compress(serialized_value) }
-    let(:compressed_raw_value) { ::Dalli::Compressor.compress(val) }
+    let(:compressed_serialized_value) { Dalli::Compressor.compress(serialized_value) }
+    let(:compressed_raw_value) { Dalli::Compressor.compress(val) }
 
     it 'retrieves the value when the flags indicate the value is both compressed and serialized' do
       assert_equal val, marshaller.retrieve(compressed_serialized_value, 0x3)
diff --git a/test/protocol/test_value_serializer.rb b/test/protocol/test_value_serializer.rb
index 436fc9c..ef73ccc 100644
--- a/test/protocol/test_value_serializer.rb
+++ b/test/protocol/test_value_serializer.rb
@@ -11,7 +11,7 @@ describe Dalli::Protocol::ValueSerializer do
         let(:options) { {} }
 
         it 'defaults to Marshal' do
-          assert_equal subject.serializer, ::Marshal
+          assert_equal subject.serializer, Marshal
         end
       end
 
@@ -41,6 +41,7 @@ describe Dalli::Protocol::ValueSerializer do
       it 'serializes the value' do
         serializer.expect :dump, serialized_dummy, [raw_value]
         val, newbitflags = vs.store(raw_value, req_options, bitflags)
+
         assert_equal val, serialized_dummy
         assert_equal newbitflags, (bitflags | 0x1)
         serializer.verify
@@ -53,6 +54,7 @@ describe Dalli::Protocol::ValueSerializer do
       it 'serializes the value' do
         serializer.expect :dump, serialized_dummy, [raw_value]
         val, newbitflags = vs.store(raw_value, req_options, bitflags)
+
         assert_equal val, serialized_dummy
         assert_equal newbitflags, (bitflags | 0x1)
         serializer.verify
@@ -65,6 +67,7 @@ describe Dalli::Protocol::ValueSerializer do
       it 'serializes the value' do
         serializer.expect :dump, serialized_dummy, [raw_value]
         val, newbitflags = vs.store(raw_value, req_options, bitflags)
+
         assert_equal val, serialized_dummy
         assert_equal newbitflags, (bitflags | 0x1)
         serializer.verify
@@ -76,6 +79,7 @@ describe Dalli::Protocol::ValueSerializer do
 
       it 'does not call the serializer and just converts the input value to a string' do
         val, newbitflags = vs.store(raw_value, req_options, bitflags)
+
         assert_equal val, raw_value.to_s
         assert_equal newbitflags, bitflags
         serializer.verify
@@ -93,6 +97,7 @@ describe Dalli::Protocol::ValueSerializer do
           exception = assert_raises Timeout::Error do
             vs.store(raw_value, req_options, bitflags)
           end
+
           assert_equal exception.message, error_message
         end
       end
@@ -109,6 +114,7 @@ describe Dalli::Protocol::ValueSerializer do
           exception = assert_raises Dalli::MarshalError do
             vs.store(raw_value, req_options, bitflags)
           end
+
           assert_equal exception.message, error_message
         end
       end
@@ -125,6 +131,7 @@ describe Dalli::Protocol::ValueSerializer do
           exception = assert_raises Dalli::MarshalError do
             vs.store(raw_value, req_options, bitflags)
           end
+
           assert_equal exception.message, error_message
         end
       end
@@ -142,6 +149,7 @@ describe Dalli::Protocol::ValueSerializer do
       it 'should return the value without deserializing' do
         bitflags = rand(32)
         bitflags &= 0xFFFE
+
         assert_equal(0, bitflags & 0x1)
         assert_equal vs.retrieve(raw_value, bitflags), raw_value
         serializer.verify
@@ -153,6 +161,7 @@ describe Dalli::Protocol::ValueSerializer do
         serializer.expect :load, deserialized_dummy, [raw_value]
         bitflags = rand(32)
         bitflags |= 0x1
+
         assert_equal(0x1, bitflags & 0x1)
         assert_equal vs.retrieve(raw_value, bitflags), deserialized_dummy
         serializer.verify
@@ -171,6 +180,7 @@ describe Dalli::Protocol::ValueSerializer do
             vs.retrieve(raw_value, Dalli::Protocol::ValueSerializer::FLAG_SERIALIZED)
           end
         end
+
         assert_equal exception.message, "Unable to unmarshal value: #{error_message}"
       end
     end
@@ -187,6 +197,7 @@ describe Dalli::Protocol::ValueSerializer do
             vs.retrieve(raw_value, Dalli::Protocol::ValueSerializer::FLAG_SERIALIZED)
           end
         end
+
         assert_equal exception.message, "Unable to unmarshal value: #{error_message}"
       end
     end
@@ -200,6 +211,7 @@ describe Dalli::Protocol::ValueSerializer do
         exception = assert_raises Dalli::UnmarshalError do
           vs.retrieve(raw_value, Dalli::Protocol::ValueSerializer::FLAG_SERIALIZED)
         end
+
         assert_equal exception.message, "Unable to unmarshal value: #{error_message}"
       end
     end
@@ -213,6 +225,7 @@ describe Dalli::Protocol::ValueSerializer do
         exception = assert_raises Dalli::UnmarshalError do
           vs.retrieve(raw_value, Dalli::Protocol::ValueSerializer::FLAG_SERIALIZED)
         end
+
         assert exception.message.start_with?("Unable to unmarshal value: #{error_message}")
       end
     end
@@ -228,7 +241,8 @@ describe Dalli::Protocol::ValueSerializer do
             vs.retrieve(raw_value, Dalli::Protocol::ValueSerializer::FLAG_SERIALIZED)
           end
         end
-        assert_equal exception.message, error_message
+
+        assert_equal error_message, exception.message
       end
     end
 
@@ -244,7 +258,8 @@ describe Dalli::Protocol::ValueSerializer do
             vs.retrieve(raw_value, Dalli::Protocol::ValueSerializer::FLAG_SERIALIZED)
           end
         end
-        assert_equal exception.message, "Unable to unmarshal value: #{error_message}"
+
+        assert exception.message.start_with?("Unable to unmarshal value: #{error_message}")
       end
     end
 
@@ -259,7 +274,8 @@ describe Dalli::Protocol::ValueSerializer do
             vs.retrieve(raw_value, Dalli::Protocol::ValueSerializer::FLAG_SERIALIZED)
           end
         end
-        assert_equal exception.message, error_message
+
+        assert exception.message.start_with?(error_message)
       end
     end
 
@@ -272,6 +288,7 @@ describe Dalli::Protocol::ValueSerializer do
         exception = assert_raises Dalli::UnmarshalError do
           vs.retrieve(raw_value, Dalli::Protocol::ValueSerializer::FLAG_SERIALIZED)
         end
+
         assert_equal exception.message, "Unable to unmarshal value: #{error_message}"
       end
     end
@@ -285,6 +302,7 @@ describe Dalli::Protocol::ValueSerializer do
         exception = assert_raises Dalli::UnmarshalError do
           vs.retrieve(raw_value, Dalli::Protocol::ValueSerializer::FLAG_SERIALIZED)
         end
+
         assert_equal exception.message, "Unable to unmarshal value: #{error_message}"
       end
     end
@@ -300,6 +318,7 @@ describe Dalli::Protocol::ValueSerializer do
             vs.retrieve(raw_value, Dalli::Protocol::ValueSerializer::FLAG_SERIALIZED)
           end
         end
+
         assert_equal exception.message, error_message
       end
     end
diff --git a/test/test_cas_client.rb b/test/test_cas_client.rb
deleted file mode 100644
index 26209ac..0000000
--- a/test/test_cas_client.rb
+++ /dev/null
@@ -1,109 +0,0 @@
-# frozen_string_literal: true
-
-require_relative 'helper'
-
-describe 'Dalli::Cas::Client' do
-  describe 'using a live server' do
-    it 'supports get with CAS' do
-      memcached_persistent do |dc|
-        dc.flush
-
-        expected = { 'blah' => 'blerg!' }
-        get_block_called = false
-        stored_value = stored_cas = nil
-        # Validate call-with-block
-        dc.get_cas('gets_key') do |v, cas|
-          get_block_called = true
-          stored_value = v
-          stored_cas = cas
-        end
-        assert get_block_called
-        assert_nil stored_value
-
-        dc.set('gets_key', expected)
-
-        # Validate call-with-return-value
-        stored_value, stored_cas = dc.get_cas('gets_key')
-        assert_equal stored_value, expected
-        refute_equal(stored_cas, 0)
-      end
-    end
-
-    it 'supports multi-get with CAS' do
-      memcached_persistent do |dc|
-        dc.close
-        dc.flush
-
-        expected_hash = { 'a' => 'foo', 'b' => 123 }
-        expected_hash.each_pair do |k, v|
-          dc.set(k, v)
-        end
-
-        # Invocation without block
-        resp = dc.get_multi_cas(%w[a b c d e f])
-        resp.each_pair do |k, data|
-          value = data.first
-          cas = data[1]
-          assert_equal expected_hash[k], value
-          assert(cas && cas != 0)
-        end
-
-        # Invocation with block
-        dc.get_multi_cas(%w[a b c d e f]) do |k, data|
-          value = data.first
-          cas = data[1]
-          assert_equal expected_hash[k], value
-          assert(cas && cas != 0)
-        end
-      end
-    end
-
-    it 'supports replace-with-CAS operation' do
-      memcached_persistent do |dc|
-        dc.flush
-        cas = dc.set('key', 'value')
-
-        # Accepts CAS, replaces, and returns new CAS
-        cas = dc.replace_cas('key', 'value2', cas)
-        assert cas.is_a?(Integer)
-
-        assert_equal 'value2', dc.get('key')
-      end
-    end
-
-    it 'supports delete with CAS' do
-      memcached_persistent do |dc|
-        cas = dc.set('some_key', 'some_value')
-        dc.delete_cas('some_key', cas)
-        assert_nil dc.get('some_key')
-      end
-    end
-
-    it 'handles CAS round-trip operations' do
-      memcached_persistent do |dc|
-        dc.flush
-
-        expected = { 'blah' => 'blerg!' }
-        dc.set('some_key', expected)
-
-        value, cas = dc.get_cas('some_key')
-        assert_equal value, expected
-        assert(!cas.nil? && cas != 0)
-
-        # Set operation, first with wrong then with correct CAS
-        expected = { 'blah' => 'set succeeded' }
-        refute(dc.set_cas('some_key', expected, cas + 1))
-        assert op_addset_succeeds(cas = dc.set_cas('some_key', expected, cas))
-
-        # Replace operation, first with wrong then with correct CAS
-        expected = { 'blah' => 'replace succeeded' }
-        refute(dc.replace_cas('some_key', expected, cas + 1))
-        assert op_addset_succeeds(cas = dc.replace_cas('some_key', expected, cas))
-
-        # Delete operation, first with wrong then with correct CAS
-        refute(dc.delete_cas('some_key', cas + 1))
-        assert dc.delete_cas('some_key', cas)
-      end
-    end
-  end
-end
diff --git a/test/test_client_options.rb b/test/test_client_options.rb
new file mode 100644
index 0000000..43a6a8e
--- /dev/null
+++ b/test/test_client_options.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require_relative 'helper'
+
+describe 'Dalli client options' do
+  it 'does not warn about valid options' do
+    dc = Dalli::Client.new('foo', compress: true)
+    # Rails.logger.expects :warn
+    assert_operator dc.instance_variable_get(:@options), :[], :compress
+  end
+
+  describe 'servers configuration' do
+    it 'defaults to localhost:11211' do
+      dc = Dalli::Client.new
+      ring = dc.send(:ring)
+      s1 = ring.servers.first.hostname
+
+      assert_equal 1, ring.servers.size
+      dc.close
+
+      dc = Dalli::Client.new('localhost:11211')
+      ring = dc.send(:ring)
+      s2 = ring.servers.first.hostname
+
+      assert_equal 1, ring.servers.size
+      dc.close
+
+      dc = Dalli::Client.new(['localhost:11211'])
+      ring = dc.send(:ring)
+      s3 = ring.servers.first.hostname
+
+      assert_equal 1, ring.servers.size
+      dc.close
+
+      assert_equal '127.0.0.1', s1
+      assert_equal s2, s3
+    end
+
+    it 'accepts a comma-separated string' do
+      dc = Dalli::Client.new('server1.example.com:11211,server2.example.com:11211')
+      ring = dc.send(:ring)
+
+      assert_equal 2, ring.servers.size
+      s1, s2 = ring.servers.map(&:hostname)
+
+      assert_equal 'server1.example.com', s1
+      assert_equal 'server2.example.com', s2
+    end
+
+    it 'accepts an array of servers' do
+      dc = Dalli::Client.new(['server1.example.com:11211', 'server2.example.com:11211'])
+      ring = dc.send(:ring)
+
+      assert_equal 2, ring.servers.size
+      s1, s2 = ring.servers.map(&:hostname)
+
+      assert_equal 'server1.example.com', s1
+      assert_equal 'server2.example.com', s2
+    end
+
+    it 'raises error when servers is a Hash' do
+      assert_raises ArgumentError do
+        Dalli::Client.new({ hosts: 'server1.example.com' })
+      end
+    end
+  end
+end
diff --git a/test/test_compressor.rb b/test/test_compressor.rb
index 61df7cb..d5560a8 100644
--- a/test/test_compressor.rb
+++ b/test/test_compressor.rb
@@ -1,51 +1,20 @@
 # frozen_string_literal: true
 
 require_relative 'helper'
-require 'json'
 
-class NoopCompressor
-  def self.compress(data)
-    data
+describe 'Dalli::Compressor' do
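+  # The literals below are raw zlib (deflate) streams; String#b marks them as binary (ASCII-8BIT).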
+  it 'compresses data using Zlib::Deflate' do
+    assert_equal "x\x9CKLJN\x01\x00\x03\xD8\x01\x8B".b,
+                 Dalli::Compressor.compress('abcd')
+    assert_equal "x\x9C+\xC9HU(,\xCDL\xCEVH*\xCA/\xCFSH\xCB\xAFP\xC8*\xCD-(\x06\x00z\x06\t\x83".b,
+                 Dalli::Compressor.compress('the quick brown fox jumps')
   end
 
-  def self.decompress(data)
-    data
-  end
-end
-
-describe 'Compressor' do
-  it 'default to Dalli::Compressor' do
-    memcached(29_199) do |dc|
-      dc.set 1, 2
-      assert_equal Dalli::Compressor, dc.instance_variable_get('@ring').servers.first.compressor
-    end
-  end
-
-  it 'support a custom compressor' do
-    memcached(29_199) do |_dc|
-      memcache = Dalli::Client.new('127.0.0.1:29199', { compressor: NoopCompressor })
-      memcache.set 1, 2
-      begin
-        assert_equal NoopCompressor,
-                     memcache.instance_variable_get('@ring').servers.first.compressor
-
-        memcached(19_127) do |newdc|
-          assert newdc.set('string-test', 'a test string')
-          assert_equal('a test string', newdc.get('string-test'))
-        end
-      end
-    end
-  end
-end
-
-describe 'GzipCompressor' do
-  it 'compress and uncompress data using Zlib::GzipWriter/Reader' do
-    memcached(19_127) do |_dc|
-      memcache = Dalli::Client.new('127.0.0.1:19127', { compress: true, compressor: Dalli::GzipCompressor })
-      data = (0...1025).map { rand(65..90).chr }.join
-      assert memcache.set('test', data)
-      assert_equal(data, memcache.get('test'))
-      assert_equal Dalli::GzipCompressor, memcache.instance_variable_get('@ring').servers.first.compressor
-    end
+  it 'decompresses data using Zlib::Inflate' do
+    assert_equal('abcd', Dalli::Compressor.decompress("x\x9CKLJN\x01\x00\x03\xD8\x01\x8B"))
+    assert_equal('the quick brown fox jumps',
+                 Dalli::Compressor.decompress(
+                   "x\x9C+\xC9HU(,\xCDL\xCEVH*\xCA/\xCFSH\xCB\xAFP\xC8*\xCD-(\x06\x00z\x06\t\x83"
+                 ))
   end
 end
diff --git a/test/test_dalli.rb b/test/test_dalli.rb
deleted file mode 100644
index 974fa4d..0000000
--- a/test/test_dalli.rb
+++ /dev/null
@@ -1,755 +0,0 @@
-# frozen_string_literal: true
-
-require_relative 'helper'
-require 'openssl'
-require 'securerandom'
-
-describe 'Dalli' do
-  describe 'options parsing' do
-    it 'not warn about valid options' do
-      dc = Dalli::Client.new('foo', compress: true)
-      # Rails.logger.expects :warn
-      assert dc.instance_variable_get(:@options)[:compress]
-    end
-
-    it 'raises error with invalid expires_in' do
-      bad_data = [{ bad: 'expires in data' }, Hash, [1, 2, 3]]
-      bad_data.each do |bad|
-        assert_raises ArgumentError do
-          Dalli::Client.new('foo', { expires_in: bad })
-        end
-      end
-    end
-
-    it 'raises error with invalid digest_class' do
-      assert_raises ArgumentError do
-        Dalli::Client.new('foo', { expires_in: 10, digest_class: Object })
-      end
-    end
-
-    it 'opens a standard TCP connection' do
-      memcached_persistent do |dc|
-        server = dc.send(:ring).servers.first
-        sock = Dalli::Socket::TCP.open(server.hostname, server.port, server, server.options)
-        assert_equal Dalli::Socket::TCP, sock.class
-
-        dc.set('abc', 123)
-        assert_equal(123, dc.get('abc'))
-      end
-    end
-
-    it 'opens a SSL TCP connection' do
-      memcached_ssl_persistent do |dc|
-        server = dc.send(:ring).servers.first
-        sock = Dalli::Socket::TCP.open(server.hostname, server.port, server, server.options)
-        assert_equal Dalli::Socket::SSLSocket, sock.class
-
-        dc.set('abc', 123)
-        assert_equal(123, dc.get('abc'))
-      end
-    end
-  end
-
-  describe 'key validation' do
-    it 'not allow blanks, but allows whitespace characters' do
-      memcached_persistent do |dc|
-        dc.set '   ', 1
-        assert_equal 1, dc.get('   ')
-        dc.set "\t", 1
-        assert_equal 1, dc.get("\t")
-        dc.set "\n", 1
-        assert_equal 1, dc.get("\n")
-        assert_raises ArgumentError do
-          dc.set '', 1
-        end
-        assert_raises ArgumentError do
-          dc.set nil, 1
-        end
-      end
-    end
-
-    it 'allow namespace to be a symbol' do
-      memcached_persistent do |_, port|
-        dc = Dalli::Client.new("localhost:#{port}", namespace: :wunderschoen)
-        dc.set 'x' * 251, 1
-        assert_equal(1, dc.get(('x' * 251).to_s))
-      end
-    end
-  end
-
-  describe 'ttl validation' do
-    it 'generated an ArgumentError for ttl that does not support to_i' do
-      memcached_persistent do |dc|
-        assert_raises ArgumentError do
-          dc.set('foo', 'bar', [])
-        end
-      end
-    end
-  end
-
-  it 'default to localhost:11211' do
-    dc = Dalli::Client.new
-    ring = dc.send(:ring)
-    s1 = ring.servers.first.hostname
-    assert_equal 1, ring.servers.size
-    dc.close
-
-    dc = Dalli::Client.new('localhost:11211')
-    ring = dc.send(:ring)
-    s2 = ring.servers.first.hostname
-    assert_equal 1, ring.servers.size
-    dc.close
-
-    dc = Dalli::Client.new(['localhost:11211'])
-    ring = dc.send(:ring)
-    s3 = ring.servers.first.hostname
-    assert_equal 1, ring.servers.size
-    dc.close
-
-    assert_equal '127.0.0.1', s1
-    assert_equal s2, s3
-  end
-
-  it 'accept comma separated string' do
-    dc = Dalli::Client.new('server1.example.com:11211,server2.example.com:11211')
-    ring = dc.send(:ring)
-    assert_equal 2, ring.servers.size
-    s1, s2 = ring.servers.map(&:hostname)
-    assert_equal 'server1.example.com', s1
-    assert_equal 'server2.example.com', s2
-  end
-
-  it 'accept array of servers' do
-    dc = Dalli::Client.new(['server1.example.com:11211', 'server2.example.com:11211'])
-    ring = dc.send(:ring)
-    assert_equal 2, ring.servers.size
-    s1, s2 = ring.servers.map(&:hostname)
-    assert_equal 'server1.example.com', s1
-    assert_equal 'server2.example.com', s2
-  end
-
-  it 'raises error when servers is a Hash' do
-    assert_raises ArgumentError do
-      Dalli::Client.new({ hosts: 'server1.example.com' })
-    end
-  end
-
-  describe 'using a live server' do
-    it 'support get/set' do
-      memcached_persistent do |dc|
-        dc.flush
-
-        val1 = '1234567890' * 999_999
-        dc.set('a', val1)
-        val2 = dc.get('a')
-        assert_equal val1, val2
-
-        assert op_addset_succeeds(dc.set('a', nil))
-        assert_nil dc.get('a')
-      end
-    end
-
-    it 'supports delete' do
-      memcached_persistent do |dc|
-        dc.set('some_key', 'some_value')
-        assert_equal 'some_value', dc.get('some_key')
-
-        dc.delete('some_key')
-        assert_nil dc.get('some_key')
-      end
-    end
-
-    it 'returns nil for nonexist key' do
-      memcached_persistent do |dc|
-        assert_nil dc.get('notexist')
-      end
-    end
-
-    it 'allows "Not found" as value' do
-      memcached_persistent do |dc|
-        dc.set('key1', 'Not found')
-        assert_equal 'Not found', dc.get('key1')
-      end
-    end
-
-    it 'support stats' do
-      memcached_persistent do |dc|
-        # make sure that get_hits would not equal 0
-        dc.set(:a, '1234567890' * 100_000)
-        dc.get(:a)
-
-        stats = dc.stats
-        servers = stats.keys
-        assert(servers.any? do |s|
-          stats[s]['get_hits'].to_i != 0
-        end, 'general stats failed')
-
-        stats_items = dc.stats(:items)
-        servers = stats_items.keys
-        assert(servers.all? do |s|
-          stats_items[s].keys.any? do |key|
-            key =~ /items:[0-9]+:number/
-          end
-        end, 'stats items failed')
-
-        stats_slabs = dc.stats(:slabs)
-        servers = stats_slabs.keys
-        assert(servers.all? do |s|
-          stats_slabs[s].keys.any?('active_slabs')
-        end, 'stats slabs failed')
-
-        # reset_stats test
-        results = dc.reset_stats
-        assert(results.all? { |x| x })
-        stats = dc.stats
-        servers = stats.keys
-
-        # check if reset was performed
-        servers.each do |s|
-          assert_equal 0, dc.stats[s]['get_hits'].to_i
-        end
-      end
-    end
-
-    it 'support the fetch operation' do
-      memcached_persistent do |dc|
-        dc.flush
-
-        expected = { 'blah' => 'blerg!' }
-        executed = false
-        value = dc.fetch('fetch_key') do
-          executed = true
-          expected
-        end
-        assert_equal expected, value
-        assert executed
-
-        executed = false
-        value = dc.fetch('fetch_key') do
-          executed = true
-          expected
-        end
-        assert_equal expected, value
-        refute executed
-      end
-    end
-
-    it 'support the fetch operation with falsey values' do
-      memcached_persistent do |dc|
-        dc.flush
-
-        dc.set('fetch_key', false)
-        res = dc.fetch('fetch_key') { flunk 'fetch block called' }
-        refute res
-      end
-    end
-
-    it 'support the fetch operation with nil values when cache_nils: true' do
-      memcached_persistent(21_345, '', cache_nils: true) do |dc|
-        dc.flush
-
-        dc.set('fetch_key', nil)
-        res = dc.fetch('fetch_key') { flunk 'fetch block called' }
-        assert_nil res
-      end
-
-      memcached_persistent(21_345, '', cache_nils: false) do |dc|
-        dc.flush
-        dc.set('fetch_key', nil)
-        executed = false
-        res = dc.fetch('fetch_key') do
-          executed = true
-          'bar'
-        end
-        assert_equal 'bar', res
-        assert executed
-      end
-    end
-
-    it 'support the cas operation' do
-      memcached_persistent do |dc|
-        dc.flush
-
-        expected = { 'blah' => 'blerg!' }
-
-        resp = dc.cas('cas_key') do |_value|
-          raise('Value it not exist')
-        end
-        assert_nil resp
-
-        mutated = { 'blah' => 'foo!' }
-        dc.set('cas_key', expected)
-        resp = dc.cas('cas_key') do |value|
-          assert_equal expected, value
-          mutated
-        end
-        assert op_cas_succeeds(resp)
-
-        resp = dc.get('cas_key')
-        assert_equal mutated, resp
-      end
-    end
-
-    it 'support the cas! operation' do
-      memcached_persistent do |dc|
-        dc.flush
-
-        mutated = { 'blah' => 'foo!' }
-        resp = dc.cas!('cas_key') do |value|
-          assert_nil value
-          mutated
-        end
-        assert op_cas_succeeds(resp)
-
-        resp = dc.get('cas_key')
-        assert_equal mutated, resp
-      end
-    end
-
-    it 'supports multi-get' do
-      memcached_persistent do |dc|
-        dc.close
-        dc.flush
-        resp = dc.get_multi(%w[a b c d e f])
-        assert_empty(resp)
-
-        dc.set('a', 'foo')
-        dc.set('b', 123)
-        dc.set('c', %w[a b c])
-        # Invocation without block
-        resp = dc.get_multi(%w[a b c d e f])
-        expected_resp = { 'a' => 'foo', 'b' => 123, 'c' => %w[a b c] }
-        assert_equal(expected_resp, resp)
-
-        # Invocation with block
-        dc.get_multi(%w[a b c d e f]) do |k, v|
-          assert(expected_resp.key?(k) && expected_resp[k] == v)
-          expected_resp.delete(k)
-        end
-        assert_empty expected_resp
-
-        # Perform a big multi-get with 1000 elements.
-        arr = []
-        dc.multi do
-          1000.times do |idx|
-            dc.set idx, idx
-            arr << idx
-          end
-        end
-
-        result = dc.get_multi(arr)
-        assert_equal(1000, result.size)
-        assert_equal(50, result['50'])
-      end
-    end
-
-    it 'does not corrupt multiget with errors' do
-      memcached_persistent do |dc|
-        dc.close
-        dc.flush
-        dc.set('a', 'av')
-        dc.set('b', 'bv')
-        assert_equal 'av', dc.get('a')
-        assert_equal 'bv', dc.get('b')
-
-        dc.multi do
-          dc.delete('non_existent_key')
-        end
-        assert_equal 'av', dc.get('a')
-        assert_equal 'bv', dc.get('b')
-      end
-    end
-
-    it 'support raw incr/decr' do
-      memcached_persistent do |client|
-        client.flush
-
-        assert op_addset_succeeds(client.set('fakecounter', 0, 0, raw: true))
-        assert_equal 1, client.incr('fakecounter', 1)
-        assert_equal 2, client.incr('fakecounter', 1)
-        assert_equal 3, client.incr('fakecounter', 1)
-        assert_equal 1, client.decr('fakecounter', 2)
-        assert_equal '1', client.get('fakecounter', raw: true)
-
-        resp = client.incr('mycounter', 0)
-        assert_nil resp
-
-        resp = client.incr('mycounter', 1, 0, 2)
-        assert_equal 2, resp
-        resp = client.incr('mycounter', 1)
-        assert_equal 3, resp
-
-        resp = client.set('rawcounter', 10, 0, raw: true)
-        assert op_cas_succeeds(resp)
-
-        resp = client.get('rawcounter', raw: true)
-        assert_equal '10', resp
-
-        resp = client.incr('rawcounter', 1)
-        assert_equal 11, resp
-      end
-    end
-
-    it 'support incr/decr operations' do
-      memcached_persistent do |dc|
-        dc.flush
-
-        resp = dc.decr('counter', 100, 5, 0)
-        assert_equal 0, resp
-
-        resp = dc.decr('counter', 10)
-        assert_equal 0, resp
-
-        resp = dc.incr('counter', 10)
-        assert_equal 10, resp
-
-        current = 10
-        100.times do |x|
-          resp = dc.incr('counter', 10)
-          assert_equal current + ((x + 1) * 10), resp
-        end
-
-        resp = dc.decr('10billion', 0, 5, 10)
-        # go over the 32-bit mark to verify proper (un)packing
-        resp = dc.incr('10billion', 10_000_000_000)
-        assert_equal 10_000_000_010, resp
-
-        resp = dc.decr('10billion', 1)
-        assert_equal 10_000_000_009, resp
-
-        resp = dc.decr('10billion', 0)
-        assert_equal 10_000_000_009, resp
-
-        resp = dc.incr('10billion', 0)
-        assert_equal 10_000_000_009, resp
-
-        assert_nil dc.incr('DNE', 10)
-        assert_nil dc.decr('DNE', 10)
-
-        resp = dc.incr('big', 100, 5, 0xFFFFFFFFFFFFFFFE)
-        assert_equal 0xFFFFFFFFFFFFFFFE, resp
-        resp = dc.incr('big', 1)
-        assert_equal 0xFFFFFFFFFFFFFFFF, resp
-
-        # rollover the 64-bit value, we'll get something undefined.
-        resp = dc.incr('big', 1)
-        refute_equal 0x10000000000000000, resp
-        dc.reset
-      end
-    end
-
-    it 'support the append and prepend operations' do
-      memcached_persistent do |dc|
-        dc.flush
-        assert op_addset_succeeds(dc.set('456', 'xyz', 0, raw: true))
-        assert dc.prepend('456', '0')
-        assert dc.append('456', '9')
-        assert_equal '0xyz9', dc.get('456', raw: true)
-        assert_equal '0xyz9', dc.get('456')
-
-        refute dc.append('nonexist', 'abc')
-        refute dc.prepend('nonexist', 'abc')
-      end
-    end
-
-    it 'supports replace operation' do
-      memcached_persistent do |dc|
-        dc.flush
-        dc.set('key', 'value')
-        assert op_replace_succeeds(dc.replace('key', 'value2'))
-
-        assert_equal 'value2', dc.get('key')
-      end
-    end
-
-    it 'support touch operation' do
-      memcached_persistent do |dc|
-        dc.flush
-        dc.set 'key', 'value'
-        assert dc.touch('key', 10)
-        assert dc.touch('key')
-        assert_equal 'value', dc.get('key')
-        assert_nil dc.touch('notexist')
-      rescue Dalli::DalliError => e
-        # This will happen when the memcached version is older than 1.4.8
-        assert_equal 'Response error 129: Unknown command', e.message
-      end
-    end
-
-    it 'support gat operation' do
-      memcached_persistent do |dc|
-        dc.flush
-        dc.set 'key', 'value'
-        assert_equal 'value', dc.gat('key', 10)
-        assert_equal 'value', dc.gat('key')
-        assert_nil dc.gat('notexist', 10)
-      rescue Dalli::DalliError => e
-        # This will happen when the memcached version is older than 1.4.8
-        assert_equal 'Response error 129: Unknown command', e.message
-      end
-    end
-
-    it 'support version operation' do
-      memcached_persistent do |dc|
-        v = dc.version
-        servers = v.keys
-        assert(servers.any? do |s|
-          !v[s].nil?
-        end, 'version failed')
-      end
-    end
-
-    it 'allow TCP connections to be configured for keepalive' do
-      memcached_persistent do |_, port|
-        dc = Dalli::Client.new("localhost:#{port}", keepalive: true)
-        dc.set(:a, 1)
-        ring = dc.send(:ring)
-        server = ring.servers.first
-        socket = server.instance_variable_get('@sock')
-
-        optval = socket.getsockopt(Socket::SOL_SOCKET, Socket::SO_KEEPALIVE)
-        optval = optval.unpack 'i'
-
-        refute_equal(optval[0], 0)
-      end
-    end
-
-    it 'pass a simple smoke test' do
-      memcached_persistent do |dc, port|
-        resp = dc.flush
-        refute_nil resp
-        assert_equal [true, true], resp
-
-        assert op_addset_succeeds(dc.set(:foo, 'bar'))
-        assert_equal 'bar', dc.get(:foo)
-
-        resp = dc.get('123')
-        assert_nil resp
-
-        assert op_addset_succeeds(dc.set('123', 'xyz'))
-
-        resp = dc.get('123')
-        assert_equal 'xyz', resp
-
-        assert op_addset_succeeds(dc.set('123', 'abc'))
-
-        dc.prepend('123', '0')
-        dc.append('123', '0')
-
-        assert_raises Dalli::UnmarshalError do
-          resp = dc.get('123')
-        end
-
-        dc.close
-        dc = nil
-
-        dc = Dalli::Client.new("localhost:#{port}", digest_class: ::OpenSSL::Digest::SHA1)
-
-        assert op_addset_succeeds(dc.set('456', 'xyz', 0, raw: true))
-
-        resp = dc.prepend '456', '0'
-        assert resp
-
-        resp = dc.append '456', '9'
-        assert resp
-
-        resp = dc.get('456', raw: true)
-        assert_equal '0xyz9', resp
-
-        assert op_addset_succeeds(dc.set('456', false))
-
-        resp = dc.get('456')
-        refute resp
-
-        resp = dc.stats
-        assert_equal Hash, resp.class
-
-        dc.close
-      end
-    end
-
-    it 'pass a simple smoke test on unix socket' do
-      memcached_persistent(MemcachedMock::UNIX_SOCKET_PATH) do |dc, path|
-        resp = dc.flush
-        refute_nil resp
-        assert_equal [true], resp
-
-        assert op_addset_succeeds(dc.set(:foo, 'bar'))
-        assert_equal 'bar', dc.get(:foo)
-
-        resp = dc.get('123')
-        assert_nil resp
-
-        assert op_addset_succeeds(dc.set('123', 'xyz'))
-
-        resp = dc.get('123')
-        assert_equal 'xyz', resp
-
-        assert op_addset_succeeds(dc.set('123', 'abc'))
-
-        dc.prepend('123', '0')
-        dc.append('123', '0')
-
-        assert_raises Dalli::UnmarshalError do
-          resp = dc.get('123')
-        end
-
-        dc.close
-        dc = nil
-
-        dc = Dalli::Client.new(path)
-
-        assert op_addset_succeeds(dc.set('456', 'xyz', 0, raw: true))
-
-        resp = dc.prepend '456', '0'
-        assert resp
-
-        resp = dc.append '456', '9'
-        assert resp
-
-        resp = dc.get('456', raw: true)
-        assert_equal '0xyz9', resp
-
-        assert op_addset_succeeds(dc.set('456', false))
-
-        resp = dc.get('456')
-        refute resp
-
-        resp = dc.stats
-        assert_equal Hash, resp.class
-
-        dc.close
-      end
-    end
-
-    it 'support multithreaded access' do
-      memcached_persistent do |cache|
-        cache.flush
-        workers = []
-
-        cache.set('f', 'zzz')
-        assert op_cas_succeeds((cache.cas('f') do |value|
-          value << 'z'
-        end))
-        assert_equal 'zzzz', cache.get('f')
-
-        # Have a bunch of threads perform a bunch of operations at the same time.
-        # Verify the result of each operation to ensure the request and response
-        # are not intermingled between threads.
-        10.times do
-          workers << Thread.new do
-            100.times do
-              cache.set('a', 9)
-              cache.set('b', 11)
-              cache.incr('cat', 10, 0, 10)
-              cache.set('f', 'zzz')
-              res = cache.cas('f') do |value|
-                value << 'z'
-              end
-              refute_nil res
-              refute cache.add('a', 11)
-              assert_equal({ 'a' => 9, 'b' => 11 }, cache.get_multi(%w[a b]))
-              inc = cache.incr('cat', 10)
-              assert_equal 0, inc % 5
-              cache.decr('cat', 5)
-              assert_equal 11, cache.get('b')
-
-              assert_equal %w[a b], cache.get_multi('a', 'b', 'c').keys.sort
-            end
-          end
-        end
-
-        workers.each(&:join)
-        cache.flush
-      end
-    end
-
-    it 'handle namespaced keys' do
-      memcached_persistent do |_, port|
-        dc = Dalli::Client.new("localhost:#{port}", namespace: 'a')
-        dc.set('namespaced', 1)
-        dc2 = Dalli::Client.new("localhost:#{port}", namespace: 'b')
-        dc2.set('namespaced', 2)
-        assert_equal 1, dc.get('namespaced')
-        assert_equal 2, dc2.get('namespaced')
-      end
-    end
-
-    it 'handle nil namespace' do
-      memcached_persistent do |_, port|
-        dc = Dalli::Client.new("localhost:#{port}", namespace: nil)
-        dc.set('key', 1)
-        assert_equal 1, dc.get('key')
-      end
-    end
-
-    it 'truncate cache keys that are too long' do
-      memcached_persistent do |_, port|
-        dc = Dalli::Client.new("localhost:#{port}", namespace: 'some:namspace')
-        key = 'this cache key is far too long so it must be hashed and truncated and stuff' * 10
-        value = 'some value'
-        assert op_addset_succeeds(dc.set(key, value))
-        assert_equal value, dc.get(key)
-      end
-    end
-
-    it 'handle namespaced keys in multi_get' do
-      memcached_persistent do |_, port|
-        dc = Dalli::Client.new("localhost:#{port}", namespace: 'a')
-        dc.set('a', 1)
-        dc.set('b', 2)
-        assert_equal({ 'a' => 1, 'b' => 2 }, dc.get_multi('a', 'b'))
-      end
-    end
-
-    it 'handle special Regexp characters in namespace with get_multi' do
-      memcached_persistent do |_, port|
-        # /(?!)/ is a contradictory PCRE and should never be able to match
-        dc = Dalli::Client.new("localhost:#{port}", namespace: '(?!)')
-        dc.set('a', 1)
-        dc.set('b', 2)
-        assert_equal({ 'a' => 1, 'b' => 2 }, dc.get_multi('a', 'b'))
-      end
-    end
-
-    it 'handle application marshalling issues' do
-      memcached_persistent do |dc|
-        with_nil_logger do
-          assert_raises Dalli::MarshalError do
-            dc.set('a', proc { true })
-          end
-        end
-      end
-    end
-
-    describe 'with compression' do
-      it 'does not allow large values' do
-        memcached_persistent do |dc|
-          value = SecureRandom.random_bytes((1024 * 1024) + 30_000)
-          with_nil_logger do
-            assert_raises Dalli::ValueOverMaxSize do
-              dc.set('verylarge', value)
-            end
-          end
-        end
-      end
-
-      it 'allow large values to be set' do
-        memcached_persistent do |dc|
-          value = '0' * 1024 * 1024
-          assert dc.set('verylarge', value, nil, compress: true)
-        end
-      end
-    end
-
-    it 'supports the with method' do
-      memcached_persistent do |dc|
-        dc.with { |c| c.set('some_key', 'some_value') }
-        assert_equal 'some_value', dc.get('some_key')
-
-        dc.with { |c| c.delete('some_key') }
-        assert_nil dc.get('some_key')
-      end
-    end
-  end
-end
diff --git a/test/test_digest_class.rb b/test/test_digest_class.rb
new file mode 100644
index 0000000..3c63487
--- /dev/null
+++ b/test/test_digest_class.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require_relative 'helper'
+
+describe 'Digest class' do
+  it 'raises error with invalid digest_class' do
+    assert_raises ArgumentError do
+      Dalli::Client.new('foo', { expires_in: 10, digest_class: Object })
+    end
+  end
+end
diff --git a/test/test_encoding.rb b/test/test_encoding.rb
deleted file mode 100644
index 9d89079..0000000
--- a/test/test_encoding.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-require_relative 'helper'
-
-describe 'Encoding' do
-  describe 'using a live server' do
-    it 'support i18n content' do
-      memcached_persistent do |dc|
-        key = 'foo'
-        utf_key = utf8 = 'ƒ©åÍÎ'
-
-        assert dc.set(key, utf8)
-        assert_equal utf8, dc.get(key)
-
-        dc.set(utf_key, utf8)
-        assert_equal utf8, dc.get(utf_key)
-      end
-    end
-
-    it 'support content expiry' do
-      memcached_persistent do |dc|
-        key = 'foo'
-        assert dc.set(key, 'bar', 1)
-        assert_equal 'bar', dc.get(key)
-        sleep 1.2
-        assert_nil dc.get(key)
-      end
-    end
-  end
-end
diff --git a/test/test_failover.rb b/test/test_failover.rb
deleted file mode 100644
index 7d83d91..0000000
--- a/test/test_failover.rb
+++ /dev/null
@@ -1,161 +0,0 @@
-# frozen_string_literal: true
-
-require_relative 'helper'
-
-describe 'failover' do
-  # Timeouts on JRuby work differently and aren't firing, meaning we're
-  # not testing the condition
-  unless defined? JRUBY_VERSION
-    describe 'timeouts' do
-      it 'not lead to corrupt sockets' do
-        memcached_persistent do |dc|
-          value = { test: '123' }
-          begin
-            Timeout.timeout 0.01 do
-              start_time = Time.now
-              10_000.times do
-                dc.set('test_123', value)
-              end
-              flunk("Did not timeout in #{Time.now - start_time}")
-            end
-          rescue Timeout::Error
-            # Ignore expected timeout
-          end
-
-          assert_equal(value, dc.get('test_123'))
-        end
-      end
-    end
-  end
-
-  describe 'assuming some bad servers' do
-    it 'silently reconnect if server hiccups' do
-      server_port = 30_124
-      memcached_persistent(server_port) do |dc, port|
-        dc.set 'foo', 'bar'
-        foo = dc.get 'foo'
-        assert_equal('bar', foo)
-
-        memcached_kill(port)
-        memcached_persistent(port) do
-          foo = dc.get 'foo'
-          assert_nil foo
-
-          memcached_kill(port)
-        end
-      end
-    end
-
-    it 'reconnects if server idles the connection' do
-      port1 = 32_112
-      port2 = 37_887
-
-      memcached(port1, '-o idle_timeout=1') do |_, first_port|
-        memcached(port2, '-o idle_timeout=1') do |_, second_port|
-          dc = Dalli::Client.new ["localhost:#{first_port}", "localhost:#{second_port}"]
-          dc.set 'foo', 'bar'
-          dc.set 'foo2', 'bar2'
-          foo = dc.get_multi 'foo', 'foo2'
-          assert_equal({ 'foo' => 'bar', 'foo2' => 'bar2' }, foo)
-
-          # wait for socket to expire and get cleaned up
-          sleep 5
-
-          foo = dc.get_multi 'foo', 'foo2'
-          assert_equal({ 'foo' => 'bar', 'foo2' => 'bar2' }, foo)
-        end
-      end
-    end
-
-    it 'handle graceful failover' do
-      port1 = 31_777
-      port2 = 32_113
-      memcached_persistent(port1) do |_first_dc, first_port|
-        memcached_persistent(port2) do |_second_dc, second_port|
-          dc = Dalli::Client.new ["localhost:#{first_port}", "localhost:#{second_port}"]
-          dc.set 'foo', 'bar'
-          foo = dc.get 'foo'
-          assert_equal('bar', foo)
-
-          memcached_kill(first_port)
-
-          dc.set 'foo', 'bar'
-          foo = dc.get 'foo'
-          assert_equal('bar', foo)
-
-          memcached_kill(second_port)
-
-          assert_raises Dalli::RingError, message: 'No server available' do
-            dc.set 'foo', 'bar'
-          end
-        end
-      end
-    end
-
-    it 'handle them gracefully in get_multi' do
-      port1 = 32_971
-      port2 = 34_312
-      memcached_persistent(port1) do |_first_dc, first_port|
-        memcached(port2) do |_second_dc, second_port|
-          dc = Dalli::Client.new ["localhost:#{first_port}", "localhost:#{second_port}"]
-          dc.set 'a', 'a1'
-          result = dc.get_multi ['a']
-          assert_equal({ 'a' => 'a1' }, result)
-
-          memcached_kill(first_port)
-
-          result = dc.get_multi ['a']
-          assert_equal({ 'a' => 'a1' }, result)
-        end
-      end
-    end
-
-    it 'handle graceful failover in get_multi' do
-      port1 = 34_541
-      port2 = 33_044
-      memcached_persistent(port1) do |_first_dc, first_port|
-        memcached_persistent(port2) do |_second_dc, second_port|
-          dc = Dalli::Client.new ["localhost:#{first_port}", "localhost:#{second_port}"]
-          dc.set 'foo', 'foo1'
-          dc.set 'bar', 'bar1'
-          result = dc.get_multi %w[foo bar]
-          assert_equal({ 'foo' => 'foo1', 'bar' => 'bar1' }, result)
-
-          memcached_kill(first_port)
-
-          dc.set 'foo', 'foo1'
-          dc.set 'bar', 'bar1'
-          result = dc.get_multi %w[foo bar]
-          assert_equal({ 'foo' => 'foo1', 'bar' => 'bar1' }, result)
-
-          memcached_kill(second_port)
-
-          result = dc.get_multi %w[foo bar]
-          assert_empty(result)
-        end
-      end
-    end
-
-    it 'still reports stats properly' do
-      port1 = 34_547
-      port2 = 33_219
-      memcached_persistent(port1) do |_first_dc, first_port|
-        memcached_persistent(port2) do |_second_dc, second_port|
-          dc = Dalli::Client.new ["localhost:#{first_port}", "localhost:#{second_port}"]
-          result = dc.stats
-          assert_instance_of Hash, result["localhost:#{first_port}"]
-          assert_instance_of Hash, result["localhost:#{second_port}"]
-
-          memcached_kill(first_port)
-
-          dc = Dalli::Client.new ["localhost:#{first_port}", "localhost:#{second_port}"]
-          result = dc.stats
-          assert_instance_of NilClass, result["localhost:#{first_port}"]
-          assert_instance_of Hash, result["localhost:#{second_port}"]
-
-          memcached_kill(second_port)
-        end
-      end
-    end
-  end
-end
diff --git a/test/test_key_manager.rb b/test/test_key_manager.rb
index 47ea2ae..17c4404 100644
--- a/test/test_key_manager.rb
+++ b/test/test_key_manager.rb
@@ -11,16 +11,16 @@ describe 'KeyManager' do
         let(:options) { {} }
 
         it 'uses Digest::MD5 as a default' do
-          assert_equal ::Digest::MD5, key_manager.digest_class
+          assert_equal Digest::MD5, key_manager.digest_class
         end
       end
 
       describe 'when there is an explicit digest_class parameter provided' do
         describe 'and the class implements hexdigest' do
-          let(:options) { { digest_class: ::Digest::SHA2 } }
+          let(:options) { { digest_class: Digest::SHA2 } }
 
           it 'uses the specified argument' do
-            assert_equal ::Digest::SHA2, key_manager.digest_class
+            assert_equal Digest::SHA2, key_manager.digest_class
           end
         end
 
@@ -31,6 +31,7 @@ describe 'KeyManager' do
             err = assert_raises ArgumentError do
               key_manager
             end
+
             assert_equal 'The digest_class object must respond to the hexdigest method', err.message
           end
         end
@@ -66,13 +67,34 @@ describe 'KeyManager' do
       end
 
       describe 'when there is a Proc provided as a namespace parameter' do
-        let(:options) { { namespace: namespace_as_symbol } }
+        let(:options) { { namespace: namespace_as_proc } }
         let(:namespace_as_proc) { proc { namespace_as_symbol } }
         let(:namespace_as_symbol) { namespace_as_s.to_sym }
         let(:namespace_as_s) { SecureRandom.hex(5) }
 
-        it 'the namespace is the stringified symbol' do
-          assert_equal namespace_as_s, key_manager.namespace
+        it 'the namespace is the proc' do
+          assert_equal namespace_as_proc, key_manager.namespace
+        end
+
+        it 'the evaluated namespace is the stringified symbol' do
+          assert_equal namespace_as_s, key_manager.evaluate_namespace
+        end
+      end
+
+      describe 'when the namespace Proc returns dynamic results' do
+        count = 0
+
+        let(:options) { { namespace: namespace_as_proc } }
+        let(:namespace_as_proc) do
+          proc { count += 1 }
+        end
+
+        it 'evaluates the namespace proc every time we need it' do
+          assert_equal 0, count
+          assert_equal '1', key_manager.evaluate_namespace
+          assert_equal(/\A2:/, key_manager.namespace_regexp)
+          assert_equal '3', key_manager.evaluate_namespace
+          assert_equal '4:test', key_manager.key_with_namespace('test')
         end
       end
     end
@@ -82,7 +104,7 @@ describe 'KeyManager' do
     subject { key_manager.validate_key(key) }
 
     describe 'when there is no namespace' do
-      let(:key_manager) { ::Dalli::KeyManager.new(options) }
+      let(:key_manager) { Dalli::KeyManager.new(options) }
       let(:options) { {} }
 
       describe 'when the key is nil' do
@@ -92,6 +114,7 @@ describe 'KeyManager' do
           err = assert_raises ArgumentError do
             subject
           end
+
           assert_equal 'key cannot be blank', err.message
         end
       end
@@ -103,6 +126,7 @@ describe 'KeyManager' do
           err = assert_raises ArgumentError do
             subject
           end
+
           assert_equal 'key cannot be blank', err.message
         end
       end
@@ -133,7 +157,7 @@ describe 'KeyManager' do
         let(:key) { Array.new(keylen) { alphanum.sample }.join }
 
         describe 'when there is no digest_class parameter' do
-          let(:truncated_key) { "#{key[0, 212]}:md5:#{::Digest::MD5.hexdigest(key)}" }
+          let(:truncated_key) { "#{key[0, 212]}:md5:#{Digest::MD5.hexdigest(key)}" }
 
           it 'returns the truncated key' do
             assert_equal 249, subject.length
@@ -142,8 +166,8 @@ describe 'KeyManager' do
         end
 
         describe 'when there is a custom digest_class parameter' do
-          let(:options) { { digest_class: ::Digest::SHA2 } }
-          let(:truncated_key) { "#{key[0, 180]}:md5:#{::Digest::SHA2.hexdigest(key)}" }
+          let(:options) { { digest_class: Digest::SHA2 } }
+          let(:truncated_key) { "#{key[0, 180]}:md5:#{Digest::SHA2.hexdigest(key)}" }
 
           it 'returns the truncated key' do
             assert_equal 249, subject.length
@@ -154,7 +178,7 @@ describe 'KeyManager' do
     end
 
     describe 'when there is a namespace' do
-      let(:key_manager) { ::Dalli::KeyManager.new(options) }
+      let(:key_manager) { Dalli::KeyManager.new(options) }
       let(:half_namespace_len) { rand(1..5) }
       let(:namespace_as_s) { SecureRandom.hex(half_namespace_len) }
       let(:options) { { namespace: namespace_as_s } }
@@ -166,6 +190,7 @@ describe 'KeyManager' do
           err = assert_raises ArgumentError do
             subject
           end
+
           assert_equal 'key cannot be blank', err.message
         end
       end
@@ -177,6 +202,7 @@ describe 'KeyManager' do
           err = assert_raises ArgumentError do
             subject
           end
+
           assert_equal 'key cannot be blank', err.message
         end
       end
@@ -191,7 +217,7 @@ describe 'KeyManager' do
       end
 
       describe 'when the key with namespace is shorter than 250 characters' do
-        let(:keylen) { rand(250 - (2 * half_namespace_len)) + 1 }
+        let(:keylen) { rand(250 - (half_namespace_len * 2)) + 1 }
         let(:alphanum) { [('a'..'z').to_a, ('A'..'Z').to_a, ('0'..'9').to_a].flatten }
         let(:key) { Array.new(keylen) { alphanum.sample }.join }
 
@@ -202,14 +228,14 @@ describe 'KeyManager' do
       end
 
       describe 'when the key with namespace is longer than 250 characters' do
-        let(:keylen) { rand(251..500) - (2 * half_namespace_len) }
+        let(:keylen) { rand(251..500) - (half_namespace_len * 2) }
         let(:alphanum) { [('a'..'z').to_a, ('A'..'Z').to_a, ('0'..'9').to_a].flatten }
         let(:key) { Array.new(keylen) { alphanum.sample }.join }
 
         describe 'when there is no digest_class parameter' do
-          let(:key_prefix) { key[0, 212 - (2 * half_namespace_len)] }
+          let(:key_prefix) { key[0, 212 - (half_namespace_len * 2)] }
           let(:truncated_key) do
-            "#{namespace_as_s}:#{key_prefix}:md5:#{::Digest::MD5.hexdigest("#{namespace_as_s}:#{key}")}"
+            "#{namespace_as_s}:#{key_prefix}:md5:#{Digest::MD5.hexdigest("#{namespace_as_s}:#{key}")}"
           end
 
           it 'returns the truncated key' do
@@ -219,10 +245,10 @@ describe 'KeyManager' do
         end
 
         describe 'when there is a custom digest_class parameter' do
-          let(:options) { { digest_class: ::Digest::SHA2, namespace: namespace_as_s } }
-          let(:key_prefix) { key[0, 180 - (2 * half_namespace_len)] }
+          let(:options) { { digest_class: Digest::SHA2, namespace: namespace_as_s } }
+          let(:key_prefix) { key[0, 180 - (half_namespace_len * 2)] }
           let(:truncated_key) do
-            "#{namespace_as_s}:#{key_prefix}:md5:#{::Digest::SHA2.hexdigest("#{namespace_as_s}:#{key}")}"
+            "#{namespace_as_s}:#{key_prefix}:md5:#{Digest::SHA2.hexdigest("#{namespace_as_s}:#{key}")}"
           end
 
           it 'returns the truncated key' do
@@ -236,7 +262,7 @@ describe 'KeyManager' do
 
   describe 'key_with_namespace' do
     let(:raw_key) { SecureRandom.hex(10) }
-    let(:key_manager) { ::Dalli::KeyManager.new(options) }
+    let(:key_manager) { Dalli::KeyManager.new(options) }
     subject { key_manager.key_with_namespace(raw_key) }
 
     describe 'without namespace' do
@@ -258,7 +284,7 @@ describe 'KeyManager' do
   end
 
   describe 'key_without_namespace' do
-    let(:key_manager) { ::Dalli::KeyManager.new(options) }
+    let(:key_manager) { Dalli::KeyManager.new(options) }
     subject { key_manager.key_without_namespace(raw_key) }
 
     describe 'without namespace' do
diff --git a/test/test_network.rb b/test/test_network.rb
deleted file mode 100644
index 1cc64f7..0000000
--- a/test/test_network.rb
+++ /dev/null
@@ -1,68 +0,0 @@
-# frozen_string_literal: true
-
-require_relative 'helper'
-
-describe 'Network' do
-  describe 'assuming a bad network' do
-    it 'handle no server available' do
-      assert_raises Dalli::RingError, message: 'No server available' do
-        dc = Dalli::Client.new 'localhost:19333'
-        dc.get 'foo'
-      end
-    end
-
-    describe 'with a fake server' do
-      it 'handle connection reset' do
-        memcached_mock(->(sock) { sock.close }) do
-          assert_raises Dalli::RingError, message: 'No server available' do
-            dc = Dalli::Client.new('localhost:19123')
-            dc.get('abc')
-          end
-        end
-      end
-
-      it 'handle connection reset with unix socket' do
-        socket_path = MemcachedMock::UNIX_SOCKET_PATH
-        memcached_mock(->(sock) { sock.close }, :start_unix, socket_path) do
-          assert_raises Dalli::RingError, message: 'No server available' do
-            dc = Dalli::Client.new(socket_path)
-            dc.get('abc')
-          end
-        end
-      end
-
-      it 'handle malformed response' do
-        memcached_mock(->(sock) { sock.write('123') }) do
-          assert_raises Dalli::RingError, message: 'No server available' do
-            dc = Dalli::Client.new('localhost:19123')
-            dc.get('abc')
-          end
-        end
-      end
-
-      it 'handle connect timeouts' do
-        memcached_mock(lambda { |sock|
-                         sleep(0.6)
-                         sock.close
-                       }, :delayed_start) do
-          assert_raises Dalli::RingError, message: 'No server available' do
-            dc = Dalli::Client.new('localhost:19123')
-            dc.get('abc')
-          end
-        end
-      end
-
-      it 'handle read timeouts' do
-        memcached_mock(lambda { |sock|
-                         sleep(0.6)
-                         sock.write('giraffe')
-                       }) do
-          assert_raises Dalli::RingError, message: 'No server available' do
-            dc = Dalli::Client.new('localhost:19123')
-            dc.get('abc')
-          end
-        end
-      end
-    end
-  end
-end
diff --git a/test/test_rack_session.rb b/test/test_rack_session.rb
index 24fed73..acb3de0 100644
--- a/test/test_rack_session.rb
+++ b/test/test_rack_session.rb
@@ -2,13 +2,14 @@
 
 require_relative 'helper'
 
+require 'json'
 require 'rack/session/dalli'
 require 'rack/lint'
 require 'rack/mock'
 describe Rack::Session::Dalli do
   before do
     @port = 19_129
-    memcached_persistent(@port)
+    memcached_persistent(:binary, @port)
     Rack::Session::Dalli::DEFAULT_DALLI_OPTIONS[:memcache_server] = "localhost:#{@port}"
 
     # test memcache connection
@@ -53,16 +54,16 @@ describe Rack::Session::Dalli do
   let(:incrementor) { Rack::Lint.new(incrementor_proc) }
 
   it 'faults on no connection' do
+    rsd = Rack::Session::Dalli.new(incrementor, memcache_server: 'nosuchserver')
     assert_raises Dalli::RingError do
-      rsd = Rack::Session::Dalli.new(incrementor, memcache_server: 'nosuchserver')
-      rsd.pool.with { |c| c.set('ping', '') }
+      rsd.data.with { |c| c.set('ping', '') }
     end
   end
 
   it 'connects to existing server' do
+    rsd = Rack::Session::Dalli.new(incrementor, namespace: 'test:rack:session')
     assert_silent do
-      rsd = Rack::Session::Dalli.new(incrementor, namespace: 'test:rack:session')
-      rsd.pool.with { |c| c.set('ping', '') }
+      rsd.data.with { |c| c.set('ping', '') }
     end
   end
 
@@ -73,8 +74,9 @@ describe Rack::Session::Dalli do
     }
 
     rsd = Rack::Session::Dalli.new(incrementor, opts)
-    assert_equal(opts[:namespace], rsd.pool.with { |c| c.instance_eval { @options[:namespace] } })
-    assert_equal(opts[:compression_min_size], rsd.pool.with { |c| c.instance_eval { @options[:compression_min_size] } })
+
+    assert_equal(opts[:namespace], rsd.data.with { |c| c.instance_eval { @options[:namespace] } })
+    assert_equal(opts[:compression_min_size], rsd.data.with { |c| c.instance_eval { @options[:compression_min_size] } })
   end
 
   it 'rejects a :cache option' do
@@ -87,6 +89,7 @@ describe Rack::Session::Dalli do
 
   it 'generates sids without an existing Dalli::Client' do
     rsd = Rack::Session::Dalli.new(incrementor)
+
     assert rsd.send :generate_sid
   end
 
@@ -98,8 +101,9 @@ describe Rack::Session::Dalli do
 
     with_connectionpool do
       rsd = Rack::Session::Dalli.new(incrementor, opts)
-      assert_equal 10, rsd.pool.available
-      rsd.pool.with do |mc|
+
+      assert_equal 10, rsd.data.available
+      rsd.data.with do |mc|
         assert_equal(opts[:namespace], mc.instance_eval { @options[:namespace] })
       end
     end
@@ -108,6 +112,7 @@ describe Rack::Session::Dalli do
   it 'creates a new cookie' do
     rsd = Rack::Session::Dalli.new(incrementor)
     res = Rack::MockRequest.new(rsd).get('/')
+
     assert_includes res['Set-Cookie'], "#{session_key}="
     assert_equal '{"counter"=>1}', res.body
   end
@@ -117,6 +122,7 @@ describe Rack::Session::Dalli do
     req = Rack::MockRequest.new(rsd)
     res = req.get('/')
     cookie = res['Set-Cookie']
+
     assert_equal '{"counter"=>2}', req.get('/', 'HTTP_COOKIE' => cookie).body
     assert_equal '{"counter"=>3}', req.get('/', 'HTTP_COOKIE' => cookie).body
   end
@@ -126,6 +132,7 @@ describe Rack::Session::Dalli do
     req = Rack::MockRequest.new(rsd)
     res = req.get('/')
     sid = res['Set-Cookie'][session_match, 1]
+
     assert_equal '{"counter"=>1}', req.get("/?rack.session=#{sid}").body
     assert_equal '{"counter"=>1}', req.get("/?rack.session=#{sid}").body
   end
@@ -135,6 +142,7 @@ describe Rack::Session::Dalli do
     req = Rack::MockRequest.new(rsd)
     res = req.get('/')
     sid = res['Set-Cookie'][session_match, 1]
+
     assert_equal '{"counter"=>2}', req.get("/?rack.session=#{sid}").body
     assert_equal '{"counter"=>3}', req.get("/?rack.session=#{sid}").body
   end
@@ -144,8 +152,10 @@ describe Rack::Session::Dalli do
     rsd = Rack::Session::Dalli.new(incrementor)
     res = Rack::MockRequest.new(rsd)
                            .get('/', 'HTTP_COOKIE' => bad_cookie)
+
     assert_equal '{"counter"=>1}', res.body
     cookie = res['Set-Cookie'][session_match]
+
     refute_match(/#{bad_cookie}/, cookie)
   end
 
@@ -155,17 +165,20 @@ describe Rack::Session::Dalli do
     res = Rack::MockRequest.new(rsd)
                            .get('/', 'HTTP_COOKIE' => bad_cookie)
     cookie = res['Set-Cookie'][session_match]
+
     refute_match(/#{bad_cookie}$/, cookie)
   end
 
   it 'sets an expiration on new sessions' do
     rsd = Rack::Session::Dalli.new(incrementor, expire_after: 3)
     res = Rack::MockRequest.new(rsd).get('/')
+
     assert_includes res.body, '"counter"=>1'
     cookie = res['Set-Cookie']
     puts 'Sleeping to expire session' if $DEBUG
     sleep 4
     res = Rack::MockRequest.new(rsd).get('/', 'HTTP_COOKIE' => cookie)
+
     refute_equal cookie, res['Set-Cookie']
     assert_includes res.body, '"counter"=>1'
   end
@@ -173,14 +186,17 @@ describe Rack::Session::Dalli do
   it 'maintains freshness of existing sessions' do
     rsd = Rack::Session::Dalli.new(incrementor, expire_after: 3)
     res = Rack::MockRequest.new(rsd).get('/')
+
     assert_includes res.body, '"counter"=>1'
     cookie = res['Set-Cookie']
     res = Rack::MockRequest.new(rsd).get('/', 'HTTP_COOKIE' => cookie)
+
     assert_equal cookie, res['Set-Cookie']
     assert_includes res.body, '"counter"=>2'
     puts 'Sleeping to expire session' if $DEBUG
     sleep 4
     res = Rack::MockRequest.new(rsd).get('/', 'HTTP_COOKIE' => cookie)
+
     refute_equal cookie, res['Set-Cookie']
     assert_includes res.body, '"counter"=>1'
   end
@@ -191,13 +207,16 @@ describe Rack::Session::Dalli do
 
     res0 = req.get('/')
     cookie = res0['Set-Cookie'][session_match]
+
     assert_equal '{"counter"=>1}', res0.body
 
     res1 = req.get('/', 'HTTP_COOKIE' => cookie)
+
     assert_nil res1['Set-Cookie']
     assert_equal '{"counter"=>2}', res1.body
 
     res2 = req.get('/', 'HTTP_COOKIE' => cookie)
+
     assert_nil res2['Set-Cookie']
     assert_equal '{"counter"=>3}', res2.body
   end
@@ -210,13 +229,16 @@ describe Rack::Session::Dalli do
 
     res1 = req.get('/')
     session = (cookie = res1['Set-Cookie'])[session_match]
+
     assert_equal '{"counter"=>1}', res1.body
 
     res2 = dreq.get('/', 'HTTP_COOKIE' => cookie)
+
     assert_nil res2['Set-Cookie']
     assert_equal '{"counter"=>2}', res2.body
 
     res3 = req.get('/', 'HTTP_COOKIE' => cookie)
+
     refute_equal session, res3['Set-Cookie'][session_match]
     assert_equal '{"counter"=>1}', res3.body
   end
@@ -229,19 +251,23 @@ describe Rack::Session::Dalli do
 
     res1 = req.get('/')
     session = (cookie = res1['Set-Cookie'])[session_match]
+
     assert_equal '{"counter"=>1}', res1.body
 
     res2 = rreq.get('/', 'HTTP_COOKIE' => cookie)
     new_cookie = res2['Set-Cookie']
     new_session = new_cookie[session_match]
+
     refute_equal session, new_session
     assert_equal '{"counter"=>2}', res2.body
 
     res3 = req.get('/', 'HTTP_COOKIE' => new_cookie)
+
     assert_equal '{"counter"=>3}', res3.body
 
     # Old cookie was deleted
     res4 = req.get('/', 'HTTP_COOKIE' => cookie)
+
     assert_equal '{"counter"=>1}', res4.body
   end
 
@@ -253,13 +279,16 @@ describe Rack::Session::Dalli do
     creq = Rack::MockRequest.new(count)
 
     res0 = dreq.get('/')
+
     assert_nil res0['Set-Cookie']
     assert_equal '{"counter"=>1}', res0.body
 
     res0 = creq.get('/')
     res1 = dreq.get('/', 'HTTP_COOKIE' => res0['Set-Cookie'])
+
     assert_equal '{"counter"=>2}', res1.body
     res2 = dreq.get('/', 'HTTP_COOKIE' => res0['Set-Cookie'])
+
     assert_equal '{"counter"=>3}', res2.body
   end
 
@@ -271,13 +300,16 @@ describe Rack::Session::Dalli do
     creq = Rack::MockRequest.new(count)
 
     res0 = sreq.get('/')
+
     assert_nil res0['Set-Cookie']
     assert_equal '{"counter"=>1}', res0.body
 
     res0 = creq.get('/')
     res1 = sreq.get('/', 'HTTP_COOKIE' => res0['Set-Cookie'])
+
     assert_equal '{"counter"=>2}', res1.body
     res2 = sreq.get('/', 'HTTP_COOKIE' => res0['Set-Cookie'])
+
     assert_equal '{"counter"=>2}', res2.body
   end
 
@@ -290,17 +322,23 @@ describe Rack::Session::Dalli do
         session.update :a => :b, :c => { d: :e },
                        :f => { g: { h: :i } }, 'test' => true
       end
-      [200, {}, [session.inspect]]
+      [200, {}, [session.to_h.to_json]]
     end
     rsd = Rack::Session::Dalli.new(hash_check)
     req = Rack::MockRequest.new(rsd)
 
     res0 = req.get('/')
-    session_id = (cookie = res0['Set-Cookie'])[session_match, 1]
-    ses0 = rsd.pool.with { |c| c.get(session_id, true) }
+    cookie = res0['Set-Cookie']
+    ses0 = JSON.parse(res0.body)
+
+    refute_nil ses0
+    assert_equal '{"a"=>"b", "c"=>{"d"=>"e"}, "f"=>{"g"=>{"h"=>"i"}}, "test"=>true}', ses0.to_s
+
+    res1 = req.get('/', 'HTTP_COOKIE' => cookie)
+    ses1 = JSON.parse(res1.body)
 
-    req.get('/', 'HTTP_COOKIE' => cookie)
-    ses1 = rsd.pool.with { |c| c.get(session_id, true) }
+    refute_nil ses1
+    assert_equal '{"a"=>"b", "c"=>{"d"=>"e"}, "f"=>{"g"=>{"h"=>"j"}}, "test"=>true}', ses1.to_s
 
     refute_equal ses0, ses1
   end
diff --git a/test/test_ring.rb b/test/test_ring.rb
index c161404..41264fe 100644
--- a/test/test_ring.rb
+++ b/test/test_ring.rb
@@ -2,22 +2,32 @@
 
 require_relative 'helper'
 
-TestServer = Struct.new(:name, :weight)
+class TestServer
+  attr_reader :name
+
+  def initialize(attribs, _client_options = {})
+    @name = attribs
+  end
+
+  def weight
+    1
+  end
+end
 
 describe 'Ring' do
   describe 'a ring of servers' do
     it 'have the continuum sorted by value' do
-      servers = [TestServer.new('localhost:11211', 1), TestServer.new('localhost:9500', 1)]
-      ring = Dalli::Ring.new(servers, {})
+      servers = ['localhost:11211', 'localhost:9500']
+      ring = Dalli::Ring.new(servers, TestServer, {})
       previous_value = 0
       ring.continuum.each do |entry|
-        assert entry.value > previous_value
+        assert_operator entry.value, :>, previous_value
         previous_value = entry.value
       end
     end
 
     it 'raise when no servers are available/defined' do
-      ring = Dalli::Ring.new([], {})
+      ring = Dalli::Ring.new([], TestServer, {})
       assert_raises Dalli::RingError, message: 'No server available' do
         ring.server_for_key('test')
       end
@@ -25,22 +35,19 @@ describe 'Ring' do
 
     describe 'containing only a single server' do
       it "raise correctly when it's not alive" do
-        servers = [
-          Dalli::Protocol::Binary.new('localhost:12345')
-        ]
-        ring = Dalli::Ring.new(servers, {})
+        servers = ['localhost:12345']
+        ring = Dalli::Ring.new(servers, Dalli::Protocol::Binary, {})
         assert_raises Dalli::RingError, message: 'No server available' do
           ring.server_for_key('test')
         end
       end
 
       it "return the server when it's alive" do
-        servers = [
-          Dalli::Protocol::Binary.new('localhost:19191')
-        ]
-        ring = Dalli::Ring.new(servers, {})
-        memcached(19_191) do |mc|
+        servers = ['localhost:19191']
+        ring = Dalli::Ring.new(servers, Dalli::Protocol::Binary, {})
+        memcached(:binary, 19_191) do |mc|
           ring = mc.send(:ring)
+
           assert_equal ring.servers.first.port, ring.server_for_key('test').port
         end
       end
@@ -48,36 +55,32 @@ describe 'Ring' do
 
     describe 'containing multiple servers' do
       it 'raise correctly when no server is alive' do
-        servers = [
-          Dalli::Protocol::Binary.new('localhost:12345'),
-          Dalli::Protocol::Binary.new('localhost:12346')
-        ]
-        ring = Dalli::Ring.new(servers, {})
+        servers = ['localhost:12345', 'localhost:12346']
+        ring = Dalli::Ring.new(servers, Dalli::Protocol::Binary, {})
         assert_raises Dalli::RingError, message: 'No server available' do
           ring.server_for_key('test')
         end
       end
 
       it 'return an alive server when at least one is alive' do
-        servers = [
-          Dalli::Protocol::Binary.new('localhost:12346'),
-          Dalli::Protocol::Binary.new('localhost:19191')
-        ]
-        ring = Dalli::Ring.new(servers, {})
-        memcached(19_191) do |mc|
+        servers = ['localhost:12346', 'localhost:19191']
+        ring = Dalli::Ring.new(servers, Dalli::Protocol::Binary, {})
+        memcached(:binary, 19_191) do |mc|
           ring = mc.send(:ring)
+
           assert_equal ring.servers.first.port, ring.server_for_key('test').port
         end
       end
     end
 
     it 'detect when a dead server is up again' do
-      memcached(19_997) do
+      memcached(:binary, 19_997) do
         down_retry_delay = 0.5
         dc = Dalli::Client.new(['localhost:19997', 'localhost:19998'], down_retry_delay: down_retry_delay)
+
         assert_equal 1, dc.stats.values.compact.count
 
-        memcached(19_998) do
+        memcached(:binary, 19_998) do
           assert_equal 2, dc.stats.values.compact.count
         end
       end
diff --git a/test/test_sasl.rb b/test/test_sasl.rb
deleted file mode 100644
index 7684a3c..0000000
--- a/test/test_sasl.rb
+++ /dev/null
@@ -1,88 +0,0 @@
-# frozen_string_literal: true
-
-require_relative 'helper'
-
-describe 'Sasl' do
-  # https://github.com/seattlerb/minitest/issues/298
-  def self.xit(msg, &block); end
-
-  describe 'a server requiring authentication' do
-    before do
-      @server = Minitest::Mock.new
-      @server.expect(:request, true)
-      @server.expect(:weight, 1)
-      @server.expect(:name, 'localhost:19124')
-    end
-
-    describe 'without authentication credentials' do
-      before do
-        ENV['MEMCACHE_USERNAME'] = 'foo'
-        ENV['MEMCACHE_PASSWORD'] = 'wrongpwd'
-      end
-
-      after do
-        ENV['MEMCACHE_USERNAME'] = nil
-        ENV['MEMCACHE_PASSWORD'] = nil
-      end
-
-      xit 'gracefully handle authentication failures' do
-        memcached_sasl_persistent do |dc|
-          assert_error Dalli::DalliError, /32/ do
-            dc.set('abc', 123)
-          end
-        end
-      end
-    end
-
-    xit 'fail SASL authentication with wrong options' do
-      memcached_sasl_persistent do |_, port|
-        dc = Dalli::Client.new("localhost:#{port}", username: 'testuser', password: 'testtest')
-        assert_error Dalli::DalliError, /32/ do
-          dc.set('abc', 123)
-        end
-      end
-    end
-
-    # OSX: Create a SASL user for the memcached application like so:
-    #
-    # saslpasswd2 -a memcached -c testuser
-    #
-    # with password 'testtest'
-    describe 'in an authenticated environment' do
-      before do
-        ENV['MEMCACHE_USERNAME'] = 'testuser'
-        ENV['MEMCACHE_PASSWORD'] = 'testtest'
-      end
-
-      after do
-        ENV['MEMCACHE_USERNAME'] = nil
-        ENV['MEMCACHE_PASSWORD'] = nil
-      end
-
-      xit 'pass SASL authentication' do
-        memcached_sasl_persistent do |dc|
-          # I get "Dalli::DalliError: Error authenticating: 32" in OSX
-          # but SASL works on Heroku servers. YMMV.
-          assert dc.set('abc', 123)
-          assert_equal 123, dc.get('abc')
-          results = dc.stats
-          assert_equal 1, results.size
-          assert_equal 38, results.values.first.size
-        end
-      end
-    end
-
-    xit 'pass SASL authentication with options' do
-      memcached_sasl_persistent do |_, port|
-        dc = Dalli::Client.new("localhost:#{port}", sasl_credentials)
-        # I get "Dalli::DalliError: Error authenticating: 32" in OSX
-        # but SASL works on Heroku servers. YMMV.
-        assert dc.set('abc', 123)
-        assert_equal 123, dc.get('abc')
-        results = dc.stats
-        assert_equal 1, results.size
-        assert_equal 38, results.values.first.size
-      end
-    end
-  end
-end
diff --git a/test/test_serializer.rb b/test/test_serializer.rb
deleted file mode 100644
index 2cf8d1f..0000000
--- a/test/test_serializer.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# frozen_string_literal: true
-
-require_relative 'helper'
-require 'json'
-
-describe 'Serializer' do
-  it 'default to Marshal' do
-    memcached(29_198) do |dc|
-      dc.set 1, 2
-      assert_equal Marshal, dc.instance_variable_get('@ring').servers.first.serializer
-    end
-  end
-
-  it 'support a custom serializer' do
-    memcached(29_198) do |_dc, port|
-      memcache = Dalli::Client.new("127.0.0.1:#{port}", serializer: JSON)
-      memcache.set 1, 2
-      begin
-        assert_equal JSON, memcache.instance_variable_get('@ring').servers.first.serializer
-
-        memcached(21_956) do |newdc|
-          assert newdc.set('json_test', { 'foo' => 'bar' })
-          assert_equal({ 'foo' => 'bar' }, newdc.get('json_test'))
-        end
-      end
-    end
-  end
-end
diff --git a/test/test_servers_arg_normalizer.rb b/test/test_servers_arg_normalizer.rb
index 578bc58..8133d62 100644
--- a/test/test_servers_arg_normalizer.rb
+++ b/test/test_servers_arg_normalizer.rb
@@ -112,6 +112,7 @@ describe Dalli::ServersArgNormalizer do
         err = assert_raises ArgumentError do
           subject
         end
+
         assert_equal 'An explicit servers argument must be a comma separated string or an array containing strings.',
                      err.message
       end
@@ -124,6 +125,7 @@ describe Dalli::ServersArgNormalizer do
         err = assert_raises ArgumentError do
           subject
         end
+
         assert_equal 'An explicit servers argument must be a comma separated string or an array containing strings.',
                      err.message
       end
diff --git a/test/utils/certificate_generator.rb b/test/utils/certificate_generator.rb
index 5a5b394..a1426e4 100644
--- a/test/utils/certificate_generator.rb
+++ b/test/utils/certificate_generator.rb
@@ -15,7 +15,7 @@ module CertificateGenerator
 
   def self.generate
     issuer_cert, issuer_key = generate_root_certificate
-    generate_server_certifcate(issuer_cert, issuer_key)
+    generate_server_certificate(issuer_cert, issuer_key)
   end
 
   def self.ssl_args
@@ -24,7 +24,7 @@ module CertificateGenerator
 
   def self.clean
     [ROOT_CA_CERT_PATH, ROOT_CA_PK_PATH, MEMCACHED_CERT_PATH, MEMCACHED_PK_PATH].each do |path|
-      File.delete(path) if File.exist?(path)
+      FileUtils.rm_rf(path)
     end
   end
 
@@ -37,7 +37,7 @@ module CertificateGenerator
     ssl_context
   end
 
-  def self.generate_server_certifcate(issuer_cert, issuer_key)
+  def self.generate_server_certificate(issuer_cert, issuer_key)
     cert, key = generate_certificate_common('/CN=localhost', issuer_cert)
     cert.serial = 2
 
diff --git a/test/utils/memcached_manager.rb b/test/utils/memcached_manager.rb
index 4ef849c..e7b778c 100644
--- a/test/utils/memcached_manager.rb
+++ b/test/utils/memcached_manager.rb
@@ -97,6 +97,35 @@ module MemcachedManager
     @cmd ||= determine_cmd
   end
 
+  def self.version
+    return @version unless @version.nil?
+
+    cmd
+    @version
+  end
+
+  MIN_META_VERSION = '1.6'
+  def self.supported_protocols
+    return [] unless version
+
+    version > MIN_META_VERSION ? %i[binary meta] : %i[binary]
+  end
+
+  META_DELETE_CAS_FIX_PATCH_VERSION = '13'
+  def self.supports_delete_cas?(protocol)
+    return true unless protocol == :meta
+
+    return false unless version > MIN_META_VERSION
+
+    minor_patch_delimiter = version.index('.', 2)
+    minor_version = version[0...minor_patch_delimiter]
+    return true if minor_version > MIN_META_VERSION
+
+    patch_version = version[minor_patch_delimiter + 1..]
+
+    patch_version >= META_DELETE_CAS_FIX_PATCH_VERSION
+  end
+
   def self.cmd_with_args(port_or_socket, args)
     socket_arg, key = parse_port_or_socket(port_or_socket)
     ["#{cmd} #{args} #{socket_arg}", key]
@@ -106,8 +135,11 @@ module MemcachedManager
     PATH_PREFIXES.each do |prefix|
       output = `#{prefix}#{MEMCACHED_VERSION_CMD}`.strip
       next unless output && output =~ MEMCACHED_VERSION_REGEXP
-      next unless Regexp.last_match(1) > MEMCACHED_MIN_MAJOR_VERSION
 
+      version = Regexp.last_match(1)
+      next unless version > MEMCACHED_MIN_MAJOR_VERSION
+
+      @version = version
       puts "Found #{output} in #{prefix.empty? ? 'PATH' : prefix}"
       return "#{prefix}#{MEMCACHED_CMD}"
     end
-- 
GitLab


From e5e8d4a447b10f6df060e94f5136eecb18d84b1a Mon Sep 17 00:00:00 2001
From: Apertis CI robot <devel@lists.apertis.org>
Date: Wed, 2 Apr 2025 10:30:45 +0000
Subject: [PATCH 3/4] Release ruby-dalli version 3.2.8-1+apertis1
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: Dylan Aïssi <dylan.aissi@collabora.com>
---
 debian/changelog | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/debian/changelog b/debian/changelog
index ea22c57..43ba94f 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,11 @@
+ruby-dalli (3.2.8-1+apertis1) apertis; urgency=medium
+
+  * Sync from debian/trixie.
+  * Remaining Apertis specific changes:
+    - Replace dependency on ruby-rails with ruby-railties.
+
+ -- Apertis CI <devel@lists.apertis.org>  Wed, 02 Apr 2025 10:30:45 +0000
+
 ruby-dalli (3.2.8-1) unstable; urgency=medium
 
   * Team upload.
-- 
GitLab


From 49d0e593ba268573f6fe370e20ba99f4fbb87fef Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Dylan=20A=C3=AFssi?= <dylan.aissi@collabora.com>
Date: Wed, 2 Apr 2025 10:33:39 +0000
Subject: [PATCH 4/4] Refresh the automatically detected licensing information
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: Dylan Aïssi <dylan.aissi@collabora.com>
---
 debian/apertis/copyright | 15 +++------------
 1 file changed, 3 insertions(+), 12 deletions(-)

diff --git a/debian/apertis/copyright b/debian/apertis/copyright
index 1bd6709..863b188 100644
--- a/debian/apertis/copyright
+++ b/debian/apertis/copyright
@@ -5,20 +5,11 @@ Copyright: Peter M. Goldstein, Mike Perham
  Mike Perham, Peter M. Goldstein. See LICENSE for details.
 License: Expat
 
-Files: README.md
-Copyright: Mike Perham, Peter M. Goldstein. See LICENSE for details.
-License: Expat
-
 Files: debian/*
-Copyright: 2015 Antonio Terceiro <terceiro@debian.org>
-            2020,2021 Debian Ruby Extras Maintainers
+Copyright: 2020-2023, Debian Ruby Team
+ 2015, Antonio Terceiro <terceiro@debian.org>
 License: Expat
 
-Files: test/*
+Files: test/integration/test_encoding.rb test/integration/test_pipelined_get.rb
 Copyright: (c) Mike Perham
 License: Expat
-
-Files: test/*
-Copyright: no-info-found
-License: UNKNOWN
-
-- 
GitLab