Dalli
Repository: petergoldstein/dalli (high performance memcached client for Ruby)
Overview
Dalli is a high-performance Memcached client library for Ruby that uses the binary protocol to deliver excellent performance.
Details
Dalli is a high-performance Memcached client library for Ruby developed by Peter Goldstein and Mike Perham. It uses the binary protocol introduced in Memcached 1.4+ and was designed as a replacement for the memcache-client gem. Implemented in pure Ruby, it is thread-safe by default and supports both single connections and connection pooling, so it does not become a bottleneck under concurrent, multi-threaded workloads. SASL authentication is supported, which makes it suitable for managed service environments such as Heroku. Starting with Rails 4, the built-in memcache store uses Dalli, making it an important component of the Ruby on Rails ecosystem. It also provides compression, failover, and timeout configuration, with an emphasis on reliability in production environments.
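The reliability features mentioned above map directly onto client options. Below is a minimal production-style sketch, assuming SASL credentials and the server list are supplied through environment variables (the variable names and host names are placeholders):
require 'dalli'
# e.g. MEMCACHED_SERVERS='cache1.example.com:11211,cache2.example.com:11211'
cache = Dalli::Client.new(
  ENV['MEMCACHED_SERVERS'],
  username: ENV['MEMCACHED_USERNAME'],  # SASL credentials for managed services
  password: ENV['MEMCACHED_PASSWORD'],
  compress: true,                       # compress larger values before storing
  failover: true,                       # redistribute keys if a server goes down
  socket_timeout: 0.5,                  # seconds to wait on socket operations
  socket_max_failures: 2,               # failures before a server is marked down
  expires_in: 3600                      # default TTL in seconds
)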
Pros and Cons
Pros
- High Performance: Efficient communication through binary protocol
- Thread Safe: Multi-threaded support by default
- Rails Integration: Works as the cache store backend for Rails 3/4 applications
- Connection Pooling: Performance optimization for concurrent processing
- SASL Authentication: Security features for managed environments like Heroku
- Failover Support: Automatic failover to the remaining servers, with configurable timeouts and retry behavior
- Compression: Efficient storage of large data
Cons
- Memcached Dependency: Requires Memcached server setup and operation
- Memory Limitations: Data lives entirely in memcached's configured memory and is evicted under LRU pressure when it fills
- Data Persistence: Risk of data loss on server restart
- Network Dependency: Network latency impact in distributed environments
- Ruby Only: Cannot be used with languages other than Ruby
Key Links
- GitHub: https://github.com/petergoldstein/dalli
- RubyGems: https://rubygems.org/gems/dalli
Usage Examples
Basic Usage
require 'dalli'
# Connect to Memcached server
dc = Dalli::Client.new('localhost:11211')
# Store data
dc.set('user:1', { name: 'Alice', age: 30 })
dc.set('count', 100, 3600) # TTL of 1 hour
# Retrieve data
user = dc.get('user:1')
puts user[:name] # => "Alice" (the hash is marshaled, so symbol keys round-trip)
count = dc.get('count')
puts count # => 100
# Delete data
dc.delete('user:1')
# Check data existence (get returns nil for a missing or expired key)
exists = !dc.get('count').nil?
puts exists # => true
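Dalli also provides a fetch helper that collapses the get-then-set pattern above into a single call; the block runs only on a cache miss. A short sketch using the same dc client and an illustrative key:
# Compute and cache the value only when 'greeting' is missing (10-minute TTL)
greeting = dc.fetch('greeting', 600) do
  "Hello from the cache miss block"
end
puts greeting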
Multiple Server Configuration
require 'dalli'
# Specify multiple Memcached servers
servers = ['192.168.1.10:11211', '192.168.1.11:11211', '192.168.1.12:11211']
dc = Dalli::Client.new(servers, {
  compress: true,
  expires_in: 3600,
  namespace: 'myapp'
})
# Distributed data storage
dc.set('user:1', { name: 'Alice', email: '[email protected]' })
dc.set('user:2', { name: 'Bob', email: '[email protected]' })
# Batch retrieval
users = dc.get_multi('user:1', 'user:2')
users.each do |key, value|
  puts "#{key}: #{value[:name]}"
end
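Keys are spread across the listed servers by consistent hashing, so adding or removing a node only remaps a fraction of the keys. If one node should hold proportionally more data, a weight can be appended to its address; the sketch below is illustrative and the hosts are placeholders:
# 'host:port:weight': the third node receives twice the share of keys
weighted_servers = [
  '192.168.1.10:11211',
  '192.168.1.11:11211',
  '192.168.1.12:11211:2'
]
dc = Dalli::Client.new(weighted_servers, namespace: 'myapp', compress: true)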
Rails Configuration
# config/environments/production.rb
Rails.application.configure do
  # Memcache store configuration using Dalli
  config.cache_store = :mem_cache_store,
    'cache1.example.com:11211',
    'cache2.example.com:11211',
    {
      namespace: 'myapp',
      expires_in: 1.day,
      compress: true,
      compression_min_size: 1024
    }
end
# Application usage
class User < ApplicationRecord
  def self.find_cached(id)
    Rails.cache.fetch("user:#{id}", expires_in: 1.hour) do
      find(id)
    end
  end

  def cached_posts
    Rails.cache.fetch("user:#{id}:posts", expires_in: 30.minutes) do
      posts.includes(:comments).to_a
    end
  end
end
# Usage example
user = User.find_cached(1)
posts = user.cached_posts
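The fetch calls above expire only by TTL, so an update can serve stale data until the entry times out. If that matters, the cached entries can be deleted explicitly when the underlying records change; a minimal sketch reusing the cache keys from the User model above (the Post model and callback are illustrative):
class Post < ApplicationRecord
  belongs_to :user

  # Drop the user's cached post list whenever one of their posts changes
  after_commit do
    Rails.cache.delete("user:#{user_id}:posts")
  end
end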
Connection Pool Configuration
require 'dalli'
require 'connection_pool'
# Using connection pool
CACHE_POOL = ConnectionPool.new(size: 5, timeout: 5) do
  Dalli::Client.new('localhost:11211', {
    socket_timeout: 0.5,
    socket_max_failures: 2,
    keepalive: true
  })
end
class UserCache
  def self.get_user(user_id)
    CACHE_POOL.with do |cache|
      cache.get("user:#{user_id}")
    end
  end

  def self.set_user(user_id, user_data)
    CACHE_POOL.with do |cache|
      cache.set("user:#{user_id}", user_data, 3600)
    end
  end

  def self.delete_user(user_id)
    CACHE_POOL.with do |cache|
      cache.delete("user:#{user_id}")
    end
  end
end
# Usage example
user_data = UserCache.get_user(123)
UserCache.set_user(123, { name: 'Charlie', role: 'admin' })
Advanced Operations and Error Handling
require 'dalli'
class CacheManager
  def initialize(servers, options = {})
    @client = Dalli::Client.new(servers, options)
  end

  def increment_counter(key, amount = 1, initial = 0)
    # Increment counter; memcached creates the key with `initial` if it is missing.
    # Note: incr/decr operate on raw integer values managed by memcached itself.
    @client.incr(key, amount, nil, initial)
  end

  def decrement_counter(key, amount = 1)
    # Decrement counter (returns nil if the key does not exist)
    @client.decr(key, amount)
  end
  def fetch_with_fallback(key, fallback_proc, ttl = 3600)
    # Fetch from cache, execute fallback if not found
    cached_value = @client.get(key)
    return cached_value unless cached_value.nil?

    begin
      fresh_value = fallback_proc.call
      @client.set(key, fresh_value, ttl)
      fresh_value
    rescue => e
      Rails.logger.error "Cache fallback failed for key #{key}: #{e.message}"
      nil
    end
  end
  def atomic_update(key, ttl = 3600)
    # Atomic update via Dalli's block-based compare-and-swap (#cas).
    # #cas returns nil if the key is missing, false if another client changed
    # the value first, and a truthy result when the swap succeeds.
    loop do
      new_value = nil
      result = @client.cas(key, ttl) do |current_value|
        new_value = yield(current_value)
      end

      if result.nil?
        # Key does not exist yet: create it, retrying if another client wins the race
        new_value = yield(nil)
        return new_value if @client.add(key, new_value, ttl)
        next
      end

      return new_value if result
      # result == false means another client updated the key first, so retry
    end
  end
end
# Usage example
cache = CacheManager.new(['localhost:11211'])
# Counter operations
current_count = cache.increment_counter('page_views', 1, 0)
puts "Page views: #{current_count}"
# Cache with fallback
user_data = cache.fetch_with_fallback('user:123',
  -> { User.find(123).attributes },
  1800
)
# Atomic update
updated_settings = cache.atomic_update('app_settings') do |current_settings|
  current_settings ||= {}
  current_settings['last_updated'] = Time.now.to_i
  current_settings
end
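When only the expiry needs to change, the value does not have to be rewritten: Dalli's touch updates a key's TTL in place. A brief sketch with a standalone client and an illustrative key:
client = Dalli::Client.new('localhost:11211')
client.set('report:today', { rows: 1200 }, 300)
# Extend the entry's lifetime to one hour without resending the payload
client.touch('report:today', 3600)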
Session Store Usage
# config/initializers/session_store.rb
require 'action_dispatch/middleware/session/dalli_store'
Rails.application.config.session_store :dalli_store, {
  memcache_server: ['session1.example.com:11211', 'session2.example.com:11211'],
  namespace: 'sessions',
  key: '_myapp_session',
  expire_after: 2.weeks,
  compress: true,
  pool_size: 10
}
# Session management class
class SessionManager
  SESSION_SERVERS = ['session1.example.com:11211', 'session2.example.com:11211']

  def self.cache_client
    @cache_client ||= Dalli::Client.new(SESSION_SERVERS, namespace: 'custom_sessions')
  end

  def self.store_user_session(session_id, user_data)
    cache_client.set(session_id, user_data, 86400) # 24 hours
  end

  def self.get_user_session(session_id)
    cache_client.get(session_id)
  end

  def self.invalidate_user_session(session_id)
    cache_client.delete(session_id)
  end

  def self.extend_session(session_id, additional_time = 3600)
    data = cache_client.get(session_id)
    return false unless data

    cache_client.set(session_id, data, additional_time)
    true
  end
end
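Note that the :dalli_store session store above ships with Dalli 2.x and was removed in Dalli 3.0; on current versions the usual approach is Rails' cache-backed session store on top of the Dalli-powered mem_cache_store. A minimal sketch of that alternative (host name is a placeholder):
# config/initializers/session_store.rb (Dalli 3.x / modern Rails)
Rails.application.config.session_store :cache_store,
  key: '_myapp_session',
  expire_after: 2.weeks
# with config.cache_store = :mem_cache_store, 'session1.example.com:11211' set in the environment config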
Performance Monitoring and Debugging
require 'dalli'
class MonitoredCache
  def initialize(servers, options = {})
    @client = Dalli::Client.new(servers, options)
    @stats = { hits: 0, misses: 0, sets: 0, deletes: 0 }
  end

  def get(key)
    start_time = Time.now
    value = @client.get(key)
    duration = Time.now - start_time

    if value.nil?
      @stats[:misses] += 1
      Rails.logger.debug "Cache MISS for key: #{key} (#{duration * 1000}ms)"
    else
      @stats[:hits] += 1
      Rails.logger.debug "Cache HIT for key: #{key} (#{duration * 1000}ms)"
    end
    value
  end

  def set(key, value, ttl = nil)
    start_time = Time.now
    result = @client.set(key, value, ttl)
    duration = Time.now - start_time

    @stats[:sets] += 1
    Rails.logger.debug "Cache SET for key: #{key} (#{duration * 1000}ms)"
    result
  end

  def delete(key)
    result = @client.delete(key)
    @stats[:deletes] += 1
    Rails.logger.debug "Cache DELETE for key: #{key}"
    result
  end

  def stats
    server_stats = @client.stats
    {
      local: @stats,
      hit_rate: (@stats[:hits].to_f / [@stats[:hits] + @stats[:misses], 1].max * 100).round(2),
      servers: server_stats
    }
  end

  def reset_stats
    @stats = { hits: 0, misses: 0, sets: 0, deletes: 0 }
  end
end
# Usage example
monitored_cache = MonitoredCache.new(['localhost:11211'])
# Normal usage
monitored_cache.set('test_key', 'test_value')
value = monitored_cache.get('test_key')
# Check statistics
puts monitored_cache.stats
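The servers entry returned by stats is keyed by server address and holds memcached's standard counters as strings; a short sketch printing a few commonly inspected ones (the exact set of stats depends on the memcached version):
monitored_cache.stats[:servers].each do |server, server_stats|
  next unless server_stats # an unreachable server yields no stats
  puts "#{server}: get_hits=#{server_stats['get_hits']} " \
       "get_misses=#{server_stats['get_misses']} " \
       "curr_connections=#{server_stats['curr_connections']} " \
       "bytes=#{server_stats['bytes']}"
end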